Jun 06 09:13:06 crc systemd[1]: Starting Kubernetes Kubelet... Jun 06 09:13:06 crc restorecon[4760]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jun 06 09:13:06 crc restorecon[4760]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jun 06 09:13:06 crc restorecon[4760]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jun 06 09:13:07 crc 
restorecon[4760]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jun 06 09:13:07 crc 
restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc 
restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc 
restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jun 06 09:13:07 
crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 
09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 
09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jun 06 09:13:07 crc 
restorecon[4760]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jun 06 09:13:07 crc restorecon[4760]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:07 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 
09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 
09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc 
restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:08 crc restorecon[4760]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jun 06 09:13:08 crc restorecon[4760]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jun 06 09:13:09 crc kubenswrapper[4911]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jun 06 09:13:09 crc kubenswrapper[4911]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jun 06 09:13:09 crc kubenswrapper[4911]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jun 06 09:13:09 crc kubenswrapper[4911]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Jun 06 09:13:09 crc kubenswrapper[4911]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jun 06 09:13:09 crc kubenswrapper[4911]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.376232 4911 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381256 4911 feature_gate.go:330] unrecognized feature gate: NewOLM Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381300 4911 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381305 4911 feature_gate.go:330] unrecognized feature gate: Example Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381308 4911 feature_gate.go:330] unrecognized feature gate: PinnedImages Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381313 4911 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381317 4911 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381321 4911 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381324 4911 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381328 4911 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381332 4911 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381336 4911 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381339 4911 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381343 4911 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381346 4911 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381349 4911 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381354 4911 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381360 4911 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381364 4911 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381368 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381371 4911 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381376 4911 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381385 4911 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381390 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381394 4911 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381398 4911 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381402 4911 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381407 4911 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381411 4911 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381416 4911 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381421 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381426 4911 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381431 4911 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381435 4911 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381440 4911 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381477 4911 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381482 4911 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381487 4911 feature_gate.go:330] unrecognized feature gate: OVNObservability Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381491 4911 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381495 4911 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381500 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381504 4911 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381509 4911 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets 
Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381514 4911 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381519 4911 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381523 4911 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381529 4911 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381534 4911 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381539 4911 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381545 4911 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381550 4911 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381554 4911 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381558 4911 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381563 4911 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381567 4911 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381571 4911 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381575 4911 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381581 4911 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381587 4911 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381591 4911 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381595 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381600 4911 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381604 4911 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381611 4911 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381617 4911 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381622 4911 feature_gate.go:330] unrecognized feature gate: SignatureStores Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381626 4911 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381630 4911 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381634 4911 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381637 4911 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381641 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.381644 4911 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384054 4911 flags.go:64] FLAG: --address="0.0.0.0" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384115 4911 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384125 4911 flags.go:64] FLAG: --anonymous-auth="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384131 4911 flags.go:64] FLAG: --application-metrics-count-limit="100" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384138 4911 flags.go:64] FLAG: --authentication-token-webhook="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384143 4911 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384149 4911 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384156 4911 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384161 4911 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384166 4911 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384171 4911 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384175 4911 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384179 4911 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384184 4911 flags.go:64] FLAG: --cgroup-root="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384188 4911 flags.go:64] FLAG: --cgroups-per-qos="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384193 4911 flags.go:64] FLAG: --client-ca-file="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384197 4911 flags.go:64] FLAG: --cloud-config="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384201 4911 flags.go:64] FLAG: --cloud-provider="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384205 4911 flags.go:64] FLAG: --cluster-dns="[]" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384212 4911 flags.go:64] FLAG: --cluster-domain="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384216 4911 
flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384221 4911 flags.go:64] FLAG: --config-dir="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384225 4911 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384230 4911 flags.go:64] FLAG: --container-log-max-files="5" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384236 4911 flags.go:64] FLAG: --container-log-max-size="10Mi" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384240 4911 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384245 4911 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384249 4911 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384255 4911 flags.go:64] FLAG: --contention-profiling="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384260 4911 flags.go:64] FLAG: --cpu-cfs-quota="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384265 4911 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384271 4911 flags.go:64] FLAG: --cpu-manager-policy="none" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384276 4911 flags.go:64] FLAG: --cpu-manager-policy-options="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384285 4911 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384290 4911 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384295 4911 flags.go:64] FLAG: --enable-debugging-handlers="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384301 4911 flags.go:64] FLAG: --enable-load-reader="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384306 4911 flags.go:64] FLAG: --enable-server="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384311 4911 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384321 4911 flags.go:64] FLAG: --event-burst="100" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384331 4911 flags.go:64] FLAG: --event-qps="50" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384338 4911 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384344 4911 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384349 4911 flags.go:64] FLAG: --eviction-hard="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384356 4911 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384361 4911 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384366 4911 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384371 4911 flags.go:64] FLAG: --eviction-soft="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384375 4911 flags.go:64] FLAG: --eviction-soft-grace-period="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384380 4911 flags.go:64] FLAG: --exit-on-lock-contention="false" Jun 06 09:13:09 crc 
kubenswrapper[4911]: I0606 09:13:09.384385 4911 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384390 4911 flags.go:64] FLAG: --experimental-mounter-path="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384394 4911 flags.go:64] FLAG: --fail-cgroupv1="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384399 4911 flags.go:64] FLAG: --fail-swap-on="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384404 4911 flags.go:64] FLAG: --feature-gates="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384410 4911 flags.go:64] FLAG: --file-check-frequency="20s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384415 4911 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384420 4911 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384427 4911 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384432 4911 flags.go:64] FLAG: --healthz-port="10248" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384437 4911 flags.go:64] FLAG: --help="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384442 4911 flags.go:64] FLAG: --hostname-override="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384447 4911 flags.go:64] FLAG: --housekeeping-interval="10s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384452 4911 flags.go:64] FLAG: --http-check-frequency="20s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384460 4911 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384465 4911 flags.go:64] FLAG: --image-credential-provider-config="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384470 4911 flags.go:64] FLAG: --image-gc-high-threshold="85" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384476 4911 flags.go:64] FLAG: --image-gc-low-threshold="80" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384481 4911 flags.go:64] FLAG: --image-service-endpoint="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384487 4911 flags.go:64] FLAG: --kernel-memcg-notification="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384492 4911 flags.go:64] FLAG: --kube-api-burst="100" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384497 4911 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384503 4911 flags.go:64] FLAG: --kube-api-qps="50" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384508 4911 flags.go:64] FLAG: --kube-reserved="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384514 4911 flags.go:64] FLAG: --kube-reserved-cgroup="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384519 4911 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384524 4911 flags.go:64] FLAG: --kubelet-cgroups="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384529 4911 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384533 4911 flags.go:64] FLAG: --lock-file="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384537 4911 flags.go:64] FLAG: --log-cadvisor-usage="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 
09:13:09.384542 4911 flags.go:64] FLAG: --log-flush-frequency="5s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384546 4911 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384554 4911 flags.go:64] FLAG: --log-json-split-stream="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384561 4911 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384571 4911 flags.go:64] FLAG: --log-text-split-stream="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384576 4911 flags.go:64] FLAG: --logging-format="text" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384582 4911 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384588 4911 flags.go:64] FLAG: --make-iptables-util-chains="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384593 4911 flags.go:64] FLAG: --manifest-url="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384598 4911 flags.go:64] FLAG: --manifest-url-header="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384606 4911 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384612 4911 flags.go:64] FLAG: --max-open-files="1000000" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384619 4911 flags.go:64] FLAG: --max-pods="110" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384623 4911 flags.go:64] FLAG: --maximum-dead-containers="-1" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384630 4911 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384634 4911 flags.go:64] FLAG: --memory-manager-policy="None" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384640 4911 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384646 4911 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384650 4911 flags.go:64] FLAG: --node-ip="192.168.126.11" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384655 4911 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384670 4911 flags.go:64] FLAG: --node-status-max-images="50" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384675 4911 flags.go:64] FLAG: --node-status-update-frequency="10s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384681 4911 flags.go:64] FLAG: --oom-score-adj="-999" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384686 4911 flags.go:64] FLAG: --pod-cidr="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384691 4911 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384700 4911 flags.go:64] FLAG: --pod-manifest-path="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384706 4911 flags.go:64] FLAG: --pod-max-pids="-1" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384712 4911 flags.go:64] FLAG: --pods-per-core="0" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384716 4911 flags.go:64] FLAG: --port="10250" Jun 06 09:13:09 crc 
kubenswrapper[4911]: I0606 09:13:09.384722 4911 flags.go:64] FLAG: --protect-kernel-defaults="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384726 4911 flags.go:64] FLAG: --provider-id="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384731 4911 flags.go:64] FLAG: --qos-reserved="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384737 4911 flags.go:64] FLAG: --read-only-port="10255" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384743 4911 flags.go:64] FLAG: --register-node="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384748 4911 flags.go:64] FLAG: --register-schedulable="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384753 4911 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384762 4911 flags.go:64] FLAG: --registry-burst="10" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384768 4911 flags.go:64] FLAG: --registry-qps="5" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384773 4911 flags.go:64] FLAG: --reserved-cpus="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384778 4911 flags.go:64] FLAG: --reserved-memory="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384785 4911 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384791 4911 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384796 4911 flags.go:64] FLAG: --rotate-certificates="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384801 4911 flags.go:64] FLAG: --rotate-server-certificates="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384805 4911 flags.go:64] FLAG: --runonce="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384809 4911 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384814 4911 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384818 4911 flags.go:64] FLAG: --seccomp-default="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384823 4911 flags.go:64] FLAG: --serialize-image-pulls="true" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384827 4911 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384832 4911 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384838 4911 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384844 4911 flags.go:64] FLAG: --storage-driver-password="root" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384849 4911 flags.go:64] FLAG: --storage-driver-secure="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384854 4911 flags.go:64] FLAG: --storage-driver-table="stats" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384860 4911 flags.go:64] FLAG: --storage-driver-user="root" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384865 4911 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384870 4911 flags.go:64] FLAG: --sync-frequency="1m0s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384877 4911 flags.go:64] FLAG: --system-cgroups="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 
09:13:09.384882 4911 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384890 4911 flags.go:64] FLAG: --system-reserved-cgroup="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384895 4911 flags.go:64] FLAG: --tls-cert-file="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384900 4911 flags.go:64] FLAG: --tls-cipher-suites="[]" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384908 4911 flags.go:64] FLAG: --tls-min-version="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384913 4911 flags.go:64] FLAG: --tls-private-key-file="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384918 4911 flags.go:64] FLAG: --topology-manager-policy="none" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384922 4911 flags.go:64] FLAG: --topology-manager-policy-options="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384927 4911 flags.go:64] FLAG: --topology-manager-scope="container" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384933 4911 flags.go:64] FLAG: --v="2" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384941 4911 flags.go:64] FLAG: --version="false" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384948 4911 flags.go:64] FLAG: --vmodule="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384955 4911 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.384960 4911 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385111 4911 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385119 4911 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385124 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385129 4911 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385134 4911 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385139 4911 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385145 4911 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385150 4911 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385154 4911 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385159 4911 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385165 4911 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385171 4911 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385176 4911 feature_gate.go:330] unrecognized feature gate: NewOLM Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385181 4911 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385186 4911 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385190 4911 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385196 4911 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385201 4911 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385206 4911 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385210 4911 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385215 4911 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385219 4911 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385224 4911 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385228 4911 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385232 4911 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385236 4911 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385241 4911 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385245 4911 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385250 4911 feature_gate.go:330] unrecognized feature gate: OVNObservability Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385254 4911 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385258 4911 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385264 4911 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385269 4911 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385275 4911 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385283 4911 feature_gate.go:330] unrecognized feature gate: Example Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385289 4911 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385295 4911 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385300 4911 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385305 4911 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385311 4911 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385316 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385320 4911 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385325 4911 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385329 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385334 4911 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385338 4911 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385342 4911 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385347 4911 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385357 4911 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385361 4911 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385365 4911 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385368 4911 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385372 4911 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385376 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385379 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385383 4911 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385386 4911 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385389 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385393 4911 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385396 4911 feature_gate.go:330] unrecognized feature 
gate: SetEIPForNLBIngressController Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385400 4911 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385403 4911 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385408 4911 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385413 4911 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385417 4911 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385421 4911 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385425 4911 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385428 4911 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385432 4911 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385435 4911 feature_gate.go:330] unrecognized feature gate: PinnedImages Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.385439 4911 feature_gate.go:330] unrecognized feature gate: SignatureStores Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.385483 4911 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.396367 4911 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.396415 4911 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396481 4911 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396489 4911 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396493 4911 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396497 4911 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396501 4911 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396505 4911 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396508 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396512 4911 feature_gate.go:330] unrecognized feature gate: Example Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 
09:13:09.396516 4911 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396520 4911 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396524 4911 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396527 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396531 4911 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396535 4911 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396543 4911 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396547 4911 feature_gate.go:330] unrecognized feature gate: SignatureStores Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396550 4911 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396554 4911 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396558 4911 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396562 4911 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396567 4911 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396571 4911 feature_gate.go:330] unrecognized feature gate: OVNObservability Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396575 4911 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396578 4911 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396582 4911 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396586 4911 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396589 4911 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396593 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396597 4911 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396601 4911 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396605 4911 feature_gate.go:330] unrecognized feature gate: NewOLM Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396609 4911 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396612 4911 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jun 06 09:13:09 crc kubenswrapper[4911]: 
W0606 09:13:09.396616 4911 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396619 4911 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396623 4911 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396626 4911 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396630 4911 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396633 4911 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396637 4911 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396640 4911 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396643 4911 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396647 4911 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396650 4911 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396654 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396658 4911 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396662 4911 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396667 4911 feature_gate.go:330] unrecognized feature gate: PinnedImages Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396671 4911 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396676 4911 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396680 4911 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396683 4911 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396687 4911 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396690 4911 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396694 4911 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396698 4911 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396701 4911 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396704 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396708 4911 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396711 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396715 4911 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396718 4911 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396722 4911 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396727 4911 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396730 4911 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396734 4911 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396737 4911 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396741 4911 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396744 4911 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396748 4911 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396752 4911 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.396759 4911 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false 
ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396869 4911 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396875 4911 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396879 4911 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396883 4911 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396888 4911 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396891 4911 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396895 4911 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396898 4911 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396902 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396906 4911 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396909 4911 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396913 4911 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396920 4911 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396925 4911 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396928 4911 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396931 4911 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396935 4911 feature_gate.go:330] unrecognized feature gate: NewOLM Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396938 4911 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396942 4911 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396945 4911 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396949 4911 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396952 4911 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396956 4911 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396960 4911 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396964 4911 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396970 4911 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396973 4911 feature_gate.go:330] unrecognized feature gate: SignatureStores Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396977 4911 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396981 4911 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396986 4911 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396990 4911 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396994 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.396998 4911 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397001 4911 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397005 4911 feature_gate.go:330] unrecognized feature gate: OVNObservability Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397009 4911 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397012 4911 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397015 4911 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397019 4911 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397022 4911 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397026 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397029 4911 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397033 4911 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397036 4911 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397040 4911 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397044 4911 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397048 4911 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397052 4911 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397056 4911 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397060 4911 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397064 4911 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397068 4911 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397072 4911 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397077 4911 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397081 4911 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397085 4911 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397108 4911 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397113 4911 feature_gate.go:330] unrecognized feature gate: PinnedImages Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397116 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397120 4911 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397124 4911 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397127 4911 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397131 4911 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397135 4911 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397138 4911 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397142 4911 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397145 4911 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397149 4911 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397153 4911 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397156 4911 feature_gate.go:330] unrecognized feature gate: Example Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.397159 4911 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.397165 4911 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false 
UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.397923 4911 server.go:940] "Client rotation is on, will bootstrap in background" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.403841 4911 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.403965 4911 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.405346 4911 server.go:997] "Starting client certificate rotation" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.405373 4911 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.405642 4911 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-16 01:34:36.683018401 +0000 UTC Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.405741 4911 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 3904h21m27.277280141s for next certificate rotation Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.444234 4911 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.449268 4911 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.487432 4911 log.go:25] "Validated CRI v1 runtime API" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.580894 4911 log.go:25] "Validated CRI v1 image API" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.583070 4911 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.604611 4911 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-06-06-09-08-03-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.604656 4911 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.643180 4911 manager.go:217] Machine: {Timestamp:2025-06-06 09:13:09.619915977 +0000 UTC m=+0.895341540 CPUVendorID:AuthenticAMD NumCores:16 NumPhysicalCores:1 NumSockets:16 CpuFrequency:2800000 MemoryCapacity:50514153472 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:974e13a0-8ce3-4941-9dfe-9f41a7608944 BootID:e8cec969-c284-4e67-8aad-4eaa67efeecc Filesystems:[{Device:/dev/vda3 
DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:5051412480 Type:vfs Inodes:1233255 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:6166278 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:25257074688 Type:vfs Inodes:6166278 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:10102833152 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:25257078784 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:e1:86:8d Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:e1:86:8d Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:5c:f9:dc Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:ce:63:da Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:f7:a9:7f Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:d2:2f:2e Speed:-1 Mtu:1496} {Name:eth10 MacAddress:ea:52:a9:41:64:83 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:d2:2d:5e:c1:22:a1 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:50514153472 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[12] Caches:[{Id:12 Size:32768 Type:Data Level:1} {Id:12 Size:32768 Type:Instruction Level:1} {Id:12 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:12 Size:16777216 Type:Unified Level:3}] SocketID:12 BookID: DrawerID:} {Id:0 Threads:[13] Caches:[{Id:13 Size:32768 Type:Data Level:1} {Id:13 Size:32768 Type:Instruction Level:1} {Id:13 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:13 Size:16777216 Type:Unified Level:3}] SocketID:13 BookID: DrawerID:} {Id:0 Threads:[14] Caches:[{Id:14 Size:32768 Type:Data Level:1} {Id:14 Size:32768 Type:Instruction Level:1} {Id:14 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:14 Size:16777216 Type:Unified Level:3}] SocketID:14 BookID: DrawerID:} {Id:0 Threads:[15] Caches:[{Id:15 Size:32768 Type:Data Level:1} {Id:15 Size:32768 Type:Instruction Level:1} {Id:15 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:15 Size:16777216 Type:Unified Level:3}] SocketID:15 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction 
Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.643536 4911 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
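The two kube-apiserver-client-kubelet certificate_manager records a little earlier fully determine the logged wait: the certificate manager picks a jittered rotation deadline inside the certificate's lifetime (typically somewhere around 70–90% of it) and then sleeps for deadline minus now. As a minimal sketch, not part of the kubelet itself, the 3904h21m27s figure can be reproduced from the two timestamps in the log, treating the record's own timestamp as "now":

```python
from datetime import datetime, timezone

# Values copied from the certificate_manager records above (kube-apiserver-client-kubelet).
logged_at = datetime(2025, 6, 6, 9, 13, 9, 405741, tzinfo=timezone.utc)     # timestamp of the "Waiting ..." record
deadline  = datetime(2025, 11, 16, 1, 34, 36, 683018, tzinfo=timezone.utc)  # logged rotation deadline

wait = deadline - logged_at
h, rem = divmod(int(wait.total_seconds()), 3600)
m, s = divmod(rem, 60)
print(f"{h}h{m}m{s}s")  # -> 3904h21m27s, matching "Waiting 3904h21m27.277280141s for next certificate rotation"
```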
Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.643858 4911 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.645436 4911 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.645837 4911 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.645944 4911 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.646858 4911 topology_manager.go:138] "Creating topology manager with none policy" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.646876 4911 container_manager_linux.go:303] "Creating device plugin manager" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.647506 4911 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.647551 4911 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.647991 4911 state_mem.go:36] "Initialized new in-memory state store" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.648124 4911 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.705981 4911 kubelet.go:418] "Attempting to sync node with API server" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.706038 4911 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
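The nodeConfig=... blob in the "Creating Container Manager object based on Node Config" record above is a single JSON object, so the effective reservation and eviction settings (SystemReserved cpu/memory/ephemeral-storage, the five HardEvictionThresholds, PodPidsLimit 4096, cgroup v2) can be read straight out of the log. A rough sketch, assuming a local copy of this log named kubelet.log with one record per line:

```python
import json

def node_config_from_record(line: str) -> dict:
    """Extract the nodeConfig={...} JSON embedded in the container-manager record."""
    start = line.index("nodeConfig=") + len("nodeConfig=")
    cfg, _ = json.JSONDecoder().raw_decode(line[start:])  # parses one JSON object, ignores trailing text
    return cfg

with open("kubelet.log") as f:  # hypothetical local copy of this log
    for line in f:
        if "Creating Container Manager object based on Node Config" in line:
            cfg = node_config_from_record(line)
            print("SystemReserved:", cfg["SystemReserved"])
            for t in cfg["HardEvictionThresholds"]:
                print(t["Signal"], t["Operator"], t["Value"])
            break
```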
Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.706063 4911 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.706080 4911 kubelet.go:324] "Adding apiserver pod source" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.706119 4911 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.710409 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.710405 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:09 crc kubenswrapper[4911]: E0606 09:13:09.710547 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:09 crc kubenswrapper[4911]: E0606 09:13:09.710581 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.710743 4911 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.712906 4911 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
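The two failed LIST calls above are the kubelet's informers asking for Services whose clusterIP is not "None" (i.e. excluding headless Services) and for this node's own Node object; they fail with connection refused because nothing is answering on api-int.crc.testing:6443 yet at this point in startup, and the reflectors simply retry. The fieldSelector values are percent-encoded in the URLs; decoding them (illustrative sketch below, URLs copied verbatim from the errors above) shows what was requested:

```python
from urllib.parse import urlsplit, parse_qs

# URLs copied verbatim from the two reflector errors above.
urls = [
    "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0",
    "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0",
]
for u in urls:
    parts = urlsplit(u)
    selector = parse_qs(parts.query)["fieldSelector"][0]  # parse_qs percent-decodes the value
    print(parts.path, "->", selector)
# /api/v1/services -> spec.clusterIP!=None   (skip headless Services)
# /api/v1/nodes    -> metadata.name=crc      (only this kubelet's Node)
```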
Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.779909 4911 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782127 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782185 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782194 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782204 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782219 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782229 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782237 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782261 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782272 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782281 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782315 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782324 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782359 4911 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.782948 4911 server.go:1280] "Started kubelet" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.783162 4911 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.783324 4911 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.784242 4911 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.784410 4911 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jun 06 09:13:09 crc systemd[1]: Started Kubernetes Kubelet. 
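Every kubenswrapper record in this log carries the klog header visible above, a severity letter (I/W/E/F), MMDD, wall-clock time, PID, and source file:line before the message, all behind the journald prefix. A small, illustrative tally by severity and source file (again assuming a local copy named kubelet.log) makes the repeated feature_gate.go warnings and the reflector.go connection-refused errors in this startup window easy to spot:

```python
import re
from collections import Counter

# klog header: <I|W|E|F><MMDD> <HH:MM:SS.micros> <pid> <file:line>] <message>,
# preceded by the journald prefix "... kubenswrapper[<pid>]: ".
KLOG = re.compile(
    r"kubenswrapper\[\d+\]: "
    r"(?P<sev>[IWEF])\d{4} \d{2}:\d{2}:\d{2}\.\d+\s+\d+ "
    r"(?P<src>[\w./-]+:\d+)\] "
)

counts = Counter()
with open("kubelet.log") as f:          # hypothetical local copy of this log
    for line in f:
        for m in KLOG.finditer(line):   # finditer copes with several records sharing one physical line
            counts[(m["sev"], m["src"].split(":")[0])] += 1

for (sev, src), n in counts.most_common(10):
    print(f"{sev} {src:<30} {n}")
```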
Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.801402 4911 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.801529 4911 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.801787 4911 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 09:55:05.83692 +0000 UTC Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.801866 4911 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 4224h41m56.035057328s for next certificate rotation Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.802212 4911 volume_manager.go:287] "The desired_state_of_world populator starts" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.802227 4911 volume_manager.go:289] "Starting Kubelet Volume Manager" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.802404 4911 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jun 06 09:13:09 crc kubenswrapper[4911]: E0606 09:13:09.802591 4911 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.803823 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:09 crc kubenswrapper[4911]: E0606 09:13:09.803979 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:09 crc kubenswrapper[4911]: E0606 09:13:09.803719 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.108:6443: connect: connection refused" interval="200ms" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.805888 4911 server.go:460] "Adding debug handlers to kubelet server" Jun 06 09:13:09 crc kubenswrapper[4911]: E0606 09:13:09.807732 4911 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.108:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.18466936e5b3669d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-06-06 09:13:09.782906525 +0000 UTC m=+1.058332068,LastTimestamp:2025-06-06 09:13:09.782906525 +0000 UTC m=+1.058332068,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.816436 4911 factory.go:153] Registering CRI-O factory Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.816735 4911 factory.go:221] Registration of the crio 
container factory successfully Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.816914 4911 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.816994 4911 factory.go:55] Registering systemd factory Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.817068 4911 factory.go:221] Registration of the systemd container factory successfully Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.817359 4911 factory.go:103] Registering Raw factory Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.817511 4911 manager.go:1196] Started watching for new ooms in manager Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.820243 4911 manager.go:319] Starting recovery of all containers Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841427 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841520 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841532 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841545 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841558 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841568 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841578 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841595 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jun 06 09:13:09 crc 
kubenswrapper[4911]: I0606 09:13:09.841609 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841620 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841630 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841644 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841654 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841670 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841682 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841693 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841709 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841719 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841728 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jun 06 09:13:09 crc 
kubenswrapper[4911]: I0606 09:13:09.841741 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841753 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841765 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841776 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841790 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841845 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841860 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841879 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841891 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841904 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841916 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jun 06 09:13:09 crc 
kubenswrapper[4911]: I0606 09:13:09.841956 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841970 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841984 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.841998 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842010 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842022 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842035 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842054 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842067 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842080 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842108 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842121 4911 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842134 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842147 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842158 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842173 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842183 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842194 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842206 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842217 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842229 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842242 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842259 4911 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842340 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842353 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842364 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842399 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842411 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842424 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842435 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842450 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.842463 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843158 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843264 4911 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843296 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843315 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843332 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843349 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843367 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843383 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843401 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843417 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843431 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843453 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843470 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843489 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843505 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843521 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843537 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843553 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843571 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843586 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.843601 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858675 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858713 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858752 4911 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858771 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858789 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858806 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858826 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858843 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858867 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858894 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858914 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858941 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858963 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.858993 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.859011 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.859036 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.859058 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.859079 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.859112 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.859129 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.859635 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.860915 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.860953 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.860980 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.861015 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.861570 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.861594 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.861616 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.861661 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.861681 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.861696 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.862579 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.862621 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.862640 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.862669 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.862687 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.862861 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.862956 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863149 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863232 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863259 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863279 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863309 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863341 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863369 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863517 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863548 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863565 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863586 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863602 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863616 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863629 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863652 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863710 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863729 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863742 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863761 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863775 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" 
volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863789 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863806 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863849 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863887 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863911 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.863927 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.864012 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.864031 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.864045 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.864060 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.864074 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.864129 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.864146 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.864159 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.864176 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.864190 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.864218 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.868384 4911 manager.go:324] Recovery completed Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877043 4911 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877179 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877209 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877224 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jun 06 09:13:09 crc 
kubenswrapper[4911]: I0606 09:13:09.877238 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877251 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877262 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877274 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877286 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877304 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877340 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877360 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877377 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877392 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877406 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877423 4911 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877484 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877499 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877540 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877554 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877566 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877577 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877588 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877601 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877614 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877627 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877649 4911 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877676 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877700 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877715 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877730 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877764 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877785 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877801 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877817 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877831 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877845 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877864 4911 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877878 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877895 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877913 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877929 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877945 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877959 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877975 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.877990 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.878006 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.878020 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.878050 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.878064 4911 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.878077 4911 reconstruct.go:97] "Volume reconstruction finished" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.878111 4911 reconciler.go:26] "Reconciler: start to sync state" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.880410 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.885720 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.885769 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.885781 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.886813 4911 cpu_manager.go:225] "Starting CPU manager" policy="none" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.886834 4911 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.886856 4911 state_mem.go:36] "Initialized new in-memory state store" Jun 06 09:13:09 crc kubenswrapper[4911]: E0606 09:13:09.903409 4911 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.944257 4911 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.946474 4911 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.946542 4911 status_manager.go:217] "Starting to sync pod status with apiserver" Jun 06 09:13:09 crc kubenswrapper[4911]: I0606 09:13:09.946596 4911 kubelet.go:2335] "Starting kubelet main sync loop" Jun 06 09:13:09 crc kubenswrapper[4911]: E0606 09:13:09.946663 4911 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jun 06 09:13:09 crc kubenswrapper[4911]: W0606 09:13:09.991115 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:09 crc kubenswrapper[4911]: E0606 09:13:09.991252 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.004176 4911 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.004866 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.108:6443: connect: connection refused" interval="400ms" Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.047070 4911 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.089809 4911 policy_none.go:49] "None policy: Start" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.091551 4911 memory_manager.go:170] "Starting memorymanager" policy="None" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.091603 4911 state_mem.go:35] "Initializing new in-memory state store" Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.104482 4911 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.250343 4911 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.250444 4911 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.251642 4911 manager.go:334] "Starting Device Plugin manager" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.251704 4911 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.251717 4911 server.go:79] "Starting device plugin registration server" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.252253 4911 eviction_manager.go:189] "Eviction manager: starting control loop" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.252273 4911 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jun 06 
09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.252473 4911 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.252579 4911 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.252590 4911 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.266623 4911 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.353183 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.354766 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.354818 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.354834 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.354868 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.355459 4911 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.108:6443: connect: connection refused" node="crc" Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.406549 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.108:6443: connect: connection refused" interval="800ms" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.556592 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.558471 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.558522 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.558534 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.558568 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.559149 4911 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.108:6443: connect: connection refused" node="crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.651651 4911 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 
09:13:10.651810 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.653495 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.653551 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.653570 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.653722 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.654138 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.654215 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.654451 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.654488 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.654501 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.654639 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.654840 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.654929 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.655541 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.655584 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.655592 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.655628 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.655640 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.655598 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.655867 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.656011 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.656053 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.656614 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.656644 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.656653 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.656870 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.656890 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.656901 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.657030 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.657166 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.657203 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.657219 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.657249 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.657259 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.658421 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.658450 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.658457 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.658482 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.658496 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.658463 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.658676 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.658702 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.659358 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.659390 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.659404 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756218 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756308 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756332 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756412 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756479 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756500 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756518 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 
09:13:10.756538 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756630 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756697 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756768 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756826 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756860 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756890 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.756957 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.785689 4911 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:10 crc kubenswrapper[4911]: W0606 09:13:10.809546 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.809642 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858390 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858480 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858512 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858545 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858595 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858602 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858624 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858681 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858664 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858694 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858651 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858694 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858770 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858814 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858849 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858887 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858899 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858862 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.858967 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.859010 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.859038 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.859057 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.859143 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.859189 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.859201 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.859194 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.859230 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.859232 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.859253 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.859259 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.959975 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.961024 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.961078 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.961087 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.961144 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jun 06 09:13:10 crc kubenswrapper[4911]: E0606 09:13:10.961686 4911 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.108:6443: connect: connection refused" node="crc" Jun 06 09:13:10 crc kubenswrapper[4911]: I0606 09:13:10.981003 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.007690 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.016333 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.032855 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.038525 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:11 crc kubenswrapper[4911]: W0606 09:13:11.045731 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:11 crc kubenswrapper[4911]: E0606 09:13:11.045889 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:11 crc kubenswrapper[4911]: W0606 09:13:11.076348 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-acc0b71b4674aa470bdf01347f7eeb2e96075bf464cb704143ba40eeee25992b WatchSource:0}: Error finding container acc0b71b4674aa470bdf01347f7eeb2e96075bf464cb704143ba40eeee25992b: Status 404 returned error can't find the container with id acc0b71b4674aa470bdf01347f7eeb2e96075bf464cb704143ba40eeee25992b Jun 06 09:13:11 crc kubenswrapper[4911]: W0606 09:13:11.079130 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-58590aaf2014557dea9b8e6a680a071a1d83d62ac238d0cea453baaa5655da4c WatchSource:0}: Error finding container 58590aaf2014557dea9b8e6a680a071a1d83d62ac238d0cea453baaa5655da4c: Status 404 returned error can't find the container with id 58590aaf2014557dea9b8e6a680a071a1d83d62ac238d0cea453baaa5655da4c Jun 06 09:13:11 crc kubenswrapper[4911]: W0606 09:13:11.081679 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-509d7f42c9b75e4a9b4eced81254db7dab237f75adfccd9b7a6d3750529d7490 WatchSource:0}: Error finding container 509d7f42c9b75e4a9b4eced81254db7dab237f75adfccd9b7a6d3750529d7490: Status 404 returned error can't find the container with id 509d7f42c9b75e4a9b4eced81254db7dab237f75adfccd9b7a6d3750529d7490 Jun 06 09:13:11 crc kubenswrapper[4911]: W0606 09:13:11.083072 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-5825616f56010a9ed94c4ba52b4bf24bc9a5e0e8e1ff6b3ea10a2c961c882d19 WatchSource:0}: Error finding container 5825616f56010a9ed94c4ba52b4bf24bc9a5e0e8e1ff6b3ea10a2c961c882d19: Status 404 returned error can't find the container with id 5825616f56010a9ed94c4ba52b4bf24bc9a5e0e8e1ff6b3ea10a2c961c882d19 Jun 06 09:13:11 crc kubenswrapper[4911]: W0606 09:13:11.084929 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-700152814d2711e07e44fda2c06cf30a771782ec1edcb208c328378621acf1af WatchSource:0}: Error finding container 700152814d2711e07e44fda2c06cf30a771782ec1edcb208c328378621acf1af: Status 404 returned error can't find the container with id 700152814d2711e07e44fda2c06cf30a771782ec1edcb208c328378621acf1af Jun 06 09:13:11 crc 
kubenswrapper[4911]: W0606 09:13:11.122581 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:11 crc kubenswrapper[4911]: E0606 09:13:11.122697 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:11 crc kubenswrapper[4911]: E0606 09:13:11.208676 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.108:6443: connect: connection refused" interval="1.6s" Jun 06 09:13:11 crc kubenswrapper[4911]: W0606 09:13:11.402073 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:11 crc kubenswrapper[4911]: E0606 09:13:11.402223 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.762663 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.764786 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.764834 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.764849 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.764890 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jun 06 09:13:11 crc kubenswrapper[4911]: E0606 09:13:11.765578 4911 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.108:6443: connect: connection refused" node="crc" Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.786177 4911 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.953059 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5825616f56010a9ed94c4ba52b4bf24bc9a5e0e8e1ff6b3ea10a2c961c882d19"} Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.954112 4911 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"509d7f42c9b75e4a9b4eced81254db7dab237f75adfccd9b7a6d3750529d7490"} Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.954929 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"58590aaf2014557dea9b8e6a680a071a1d83d62ac238d0cea453baaa5655da4c"} Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.957187 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"acc0b71b4674aa470bdf01347f7eeb2e96075bf464cb704143ba40eeee25992b"} Jun 06 09:13:11 crc kubenswrapper[4911]: I0606 09:13:11.958015 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"700152814d2711e07e44fda2c06cf30a771782ec1edcb208c328378621acf1af"} Jun 06 09:13:12 crc kubenswrapper[4911]: W0606 09:13:12.674969 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:12 crc kubenswrapper[4911]: E0606 09:13:12.675067 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.785669 4911 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:12 crc kubenswrapper[4911]: E0606 09:13:12.809667 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.108:6443: connect: connection refused" interval="3.2s" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.963665 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b15abca82d86de350ba737669bcd8cb02538b606020f11b03a3f5afe59f78902"} Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.963730 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"13e5bc7c99a922b6cba6399b1103ef954da51a15f66900b7e2ae7cf878198a3e"} Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.963743 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d41ff01e4682b5a69716aa2f924343168b0d422cb87f64962a4a8415e28f6502"} Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.965169 4911 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5dd427adcb09d4d98a6b9f5005549745cd562b83e5a05045f1d52408d6b75829" exitCode=0 Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.965888 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.965264 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5dd427adcb09d4d98a6b9f5005549745cd562b83e5a05045f1d52408d6b75829"} Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.971356 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.971404 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.971418 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.971452 4911 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7f69d826d928a887e6b947f0319ee3db6cdf38257750465cd5e1f221cb7c00bf" exitCode=0 Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.971560 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7f69d826d928a887e6b947f0319ee3db6cdf38257750465cd5e1f221cb7c00bf"} Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.971693 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.972793 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.972818 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.972827 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.973497 4911 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="15b9b76f371b45b81ba79eb915a7b175199148d763d0d770caeb80d83c36e188" exitCode=0 Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.973544 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"15b9b76f371b45b81ba79eb915a7b175199148d763d0d770caeb80d83c36e188"} Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.973602 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.974337 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:12 crc 
kubenswrapper[4911]: I0606 09:13:12.974687 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.974716 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.974727 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.975482 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.975507 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.975518 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.975520 4911 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="856ea39d4a4786059238780b9d0390a883ad0ce7dd5d329ba1b1621760aa18fd" exitCode=0 Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.975556 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"856ea39d4a4786059238780b9d0390a883ad0ce7dd5d329ba1b1621760aa18fd"} Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.975638 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.976344 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.976386 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:12 crc kubenswrapper[4911]: I0606 09:13:12.976399 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.366358 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.368551 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.368621 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.368636 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.368672 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jun 06 09:13:13 crc kubenswrapper[4911]: E0606 09:13:13.369671 4911 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.108:6443: connect: connection refused" node="crc" Jun 06 09:13:13 crc kubenswrapper[4911]: W0606 09:13:13.667380 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get 
"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:13 crc kubenswrapper[4911]: E0606 09:13:13.667676 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.785978 4911 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:13 crc kubenswrapper[4911]: W0606 09:13:13.907875 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:13 crc kubenswrapper[4911]: E0606 09:13:13.907969 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.980716 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"8502612295e029df9a568de1c9352e07ae29c07f77f75fc79177276d0755bd1e"} Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.980808 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.986378 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.986436 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.986449 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.989116 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"fd7e95983faa6ab8c768606686277c3a477f45a9e200bc591b21947b15db46db"} Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.989177 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"083aa03fdca016b3a57847070688a5aa6588d0656f31ba971d0cdd4ee9801663"} Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.989195 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0cb48c47558895ed180a139351dbe01609b2c3acaab6f7b0e319f6f9c716dda1"} Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.989269 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.991034 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.991085 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.991110 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.993039 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d9cc86f01eca8790a60220b27da58a2b95a06781836be78cf057dc6247d78867"} Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.993112 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.994589 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.994631 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.994648 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.995814 4911 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8651eae34d17a37d2d7319f6512fe4f4e04486f19803d6b0e88e073332a755b3" exitCode=0 Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.995894 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8651eae34d17a37d2d7319f6512fe4f4e04486f19803d6b0e88e073332a755b3"} Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.995965 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.997623 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.997686 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:13 crc kubenswrapper[4911]: I0606 09:13:13.997705 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:14 crc kubenswrapper[4911]: I0606 09:13:14.003585 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4b5bb96a7312069f50f9bed423e78da3db154b4e2203a83b674bd662dab8a5e6"} Jun 06 09:13:14 crc kubenswrapper[4911]: I0606 09:13:14.003669 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4b702e65cf65dbec8da9125a823c877a6bfaf8745b0e4292dc2a4f886b2e9203"} Jun 06 09:13:14 crc kubenswrapper[4911]: I0606 09:13:14.003689 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"46a3fc66d21cb1eba9814737fce0deeb40ace097774c06f788f2c4c79a321225"} Jun 06 09:13:14 crc kubenswrapper[4911]: I0606 09:13:14.003702 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0b3ade80b67b75dc3f26a44ebadcbfaacea0e49c4f3aeac569f467a441b42716"} Jun 06 09:13:14 crc kubenswrapper[4911]: W0606 09:13:14.017018 4911 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.108:6443: connect: connection refused Jun 06 09:13:14 crc kubenswrapper[4911]: E0606 09:13:14.017142 4911 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.108:6443: connect: connection refused" logger="UnhandledError" Jun 06 09:13:14 crc kubenswrapper[4911]: I0606 09:13:14.139612 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.010310 4911 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="76da895e48e9c816ffff0ff508165fa8da06a6cdb97d2c0e8fb582009910f881" exitCode=0 Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.010387 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"76da895e48e9c816ffff0ff508165fa8da06a6cdb97d2c0e8fb582009910f881"} Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.010560 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.011728 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.011760 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.011773 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.014932 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1900a48434ecd45580b3f2e3490b9c84d0250cd414e4e1b1ad14d7c87d345756"} Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.015013 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.015046 4911 prober_manager.go:312] "Failed to trigger a manual run" 
probe="Readiness" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.015106 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.015214 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.015489 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.016406 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.016438 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.016450 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.016459 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.016482 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.016492 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.016564 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.016574 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.016591 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.017214 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.017326 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.017472 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:15 crc kubenswrapper[4911]: I0606 09:13:15.226459 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.023670 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"76ca78f62915a82a003cc2c8589d0b07cc51336f3171f1ebe4d54fc875b87a81"} Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.023722 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.023730 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.023748 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.023726 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"43cc5e5126421d7624403d7765f9be38f059bed540c8f227ae2e2de3246301b3"} Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.023777 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1c5362beb868098bcc07fc08d2c891a6483b809f58da069a8384044d89f81fb8"} Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.023824 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"34b1013751354a6f3123c107632afc104d2fbec4653eea168b2cc82df444299e"} Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.023838 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.024729 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.024757 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.024765 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.024831 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.024856 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.024866 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.024866 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.024903 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.024912 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.058445 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.435605 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.570175 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.571826 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.571881 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.571899 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:16 crc kubenswrapper[4911]: I0606 09:13:16.571934 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.031159 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e40cdf7aec681971e00de2076038d09a7ef689123b02ae1a3677f8623462cef3"} Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.031228 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.031228 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.031421 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.032244 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.032284 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.032297 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.032987 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.033025 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.033067 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.033076 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.033044 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.033152 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.037956 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.057655 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:17 crc kubenswrapper[4911]: I0606 09:13:17.562464 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.033852 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.033870 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.034050 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.034906 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.034941 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.034952 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.034979 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.035005 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.035016 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.035294 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.035320 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.035340 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.553249 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:18 crc kubenswrapper[4911]: I0606 09:13:18.936539 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.036868 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.036927 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.036868 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.038518 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.038555 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.038559 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.038563 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.038589 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.038668 4911 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.038709 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.038726 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.038685 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.436453 4911 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jun 06 09:13:19 crc kubenswrapper[4911]: I0606 09:13:19.436574 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jun 06 09:13:20 crc kubenswrapper[4911]: E0606 09:13:20.267234 4911 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jun 06 09:13:24 crc kubenswrapper[4911]: I0606 09:13:24.145937 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:24 crc kubenswrapper[4911]: I0606 09:13:24.146126 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:24 crc kubenswrapper[4911]: I0606 09:13:24.147700 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:24 crc kubenswrapper[4911]: I0606 09:13:24.147763 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:24 crc kubenswrapper[4911]: I0606 09:13:24.147775 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:24 crc kubenswrapper[4911]: I0606 09:13:24.179867 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jun 06 09:13:24 crc kubenswrapper[4911]: I0606 09:13:24.180062 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:24 crc kubenswrapper[4911]: I0606 09:13:24.181395 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:24 crc kubenswrapper[4911]: I0606 09:13:24.181447 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:24 crc kubenswrapper[4911]: I0606 09:13:24.181462 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:24 crc kubenswrapper[4911]: I0606 09:13:24.786962 4911 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.055040 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.057140 4911 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1900a48434ecd45580b3f2e3490b9c84d0250cd414e4e1b1ad14d7c87d345756" exitCode=255 Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.057187 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"1900a48434ecd45580b3f2e3490b9c84d0250cd414e4e1b1ad14d7c87d345756"} Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.057403 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.058304 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.058335 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.058344 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.058902 4911 scope.go:117] "RemoveContainer" containerID="1900a48434ecd45580b3f2e3490b9c84d0250cd414e4e1b1ad14d7c87d345756" Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.970904 4911 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.970986 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.981247 4911 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jun 06 09:13:25 crc kubenswrapper[4911]: I0606 09:13:25.981357 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jun 06 09:13:26 crc kubenswrapper[4911]: I0606 09:13:26.061575 4911 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jun 06 09:13:26 crc kubenswrapper[4911]: I0606 09:13:26.063600 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f"} Jun 06 09:13:26 crc kubenswrapper[4911]: I0606 09:13:26.063855 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:26 crc kubenswrapper[4911]: I0606 09:13:26.064871 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:26 crc kubenswrapper[4911]: I0606 09:13:26.064904 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:26 crc kubenswrapper[4911]: I0606 09:13:26.064914 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:28 crc kubenswrapper[4911]: I0606 09:13:28.558553 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:28 crc kubenswrapper[4911]: I0606 09:13:28.558719 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:28 crc kubenswrapper[4911]: I0606 09:13:28.558818 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:28 crc kubenswrapper[4911]: I0606 09:13:28.560128 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:28 crc kubenswrapper[4911]: I0606 09:13:28.560193 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:28 crc kubenswrapper[4911]: I0606 09:13:28.560214 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:28 crc kubenswrapper[4911]: I0606 09:13:28.562680 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:29 crc kubenswrapper[4911]: I0606 09:13:29.071379 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:29 crc kubenswrapper[4911]: I0606 09:13:29.072272 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:29 crc kubenswrapper[4911]: I0606 09:13:29.072312 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:29 crc kubenswrapper[4911]: I0606 09:13:29.072324 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:29 crc kubenswrapper[4911]: I0606 09:13:29.435972 4911 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jun 06 09:13:29 crc kubenswrapper[4911]: I0606 09:13:29.436084 4911 prober.go:107] 
"Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jun 06 09:13:30 crc kubenswrapper[4911]: I0606 09:13:30.074084 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:30 crc kubenswrapper[4911]: I0606 09:13:30.075107 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:30 crc kubenswrapper[4911]: I0606 09:13:30.075136 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:30 crc kubenswrapper[4911]: I0606 09:13:30.075150 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:30 crc kubenswrapper[4911]: E0606 09:13:30.267989 4911 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jun 06 09:13:30 crc kubenswrapper[4911]: E0606 09:13:30.955181 4911 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.040021 4911 trace.go:236] Trace[1558975881]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Jun-2025 09:13:17.491) (total time: 13548ms): Jun 06 09:13:31 crc kubenswrapper[4911]: Trace[1558975881]: ---"Objects listed" error: 13548ms (09:13:31.039) Jun 06 09:13:31 crc kubenswrapper[4911]: Trace[1558975881]: [13.548749211s] [13.548749211s] END Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.040106 4911 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.040027 4911 trace.go:236] Trace[1789531410]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Jun-2025 09:13:17.591) (total time: 13365ms): Jun 06 09:13:31 crc kubenswrapper[4911]: Trace[1789531410]: ---"Objects listed" error: 13365ms (09:13:30.957) Jun 06 09:13:31 crc kubenswrapper[4911]: Trace[1789531410]: [13.365503852s] [13.365503852s] END Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.040642 4911 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.045206 4911 trace.go:236] Trace[513165438]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Jun-2025 09:13:18.189) (total time: 12855ms): Jun 06 09:13:31 crc kubenswrapper[4911]: Trace[513165438]: ---"Objects listed" error: 12855ms (09:13:31.045) Jun 06 09:13:31 crc kubenswrapper[4911]: Trace[513165438]: [12.855606601s] [12.855606601s] END Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.045240 4911 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.045353 4911 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.063345 4911 
kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.063488 4911 trace.go:236] Trace[546312452]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Jun-2025 09:13:17.895) (total time: 13168ms): Jun 06 09:13:31 crc kubenswrapper[4911]: Trace[546312452]: ---"Objects listed" error: 13168ms (09:13:31.063) Jun 06 09:13:31 crc kubenswrapper[4911]: Trace[546312452]: [13.168413253s] [13.168413253s] END Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.063517 4911 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.718584 4911 apiserver.go:52] "Watching apiserver" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.749370 4911 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.749654 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"] Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.750224 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.750364 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.750456 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.750522 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.750640 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.750752 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.750793 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.750934 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.751050 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.753187 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.753192 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.753331 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.753355 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.753184 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.754313 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.754331 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.754443 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.754538 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.796044 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.803123 4911 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.824236 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.850407 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.850770 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.850869 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.850963 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.851116 4911 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.851358 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.851461 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.851549 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.851635 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.851735 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.851823 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.851920 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.852000 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.852081 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") 
pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.852209 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.852285 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.852353 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.852427 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.852509 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.850888 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.851130 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.851311 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.851913 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.853570 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.852286 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.852528 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:32.352505961 +0000 UTC m=+23.627931504 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.852762 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.852953 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.853081 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.853145 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.853278 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.853792 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.853345 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.853883 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.853375 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.853382 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.853387 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.854039 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.854273 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.854697 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.854810 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.854886 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.854900 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855029 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855108 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855180 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855260 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855341 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855792 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855903 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856267 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856384 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod 
\"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856564 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856712 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856790 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856858 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856941 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857018 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855375 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855456 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855491 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). 
InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855697 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855791 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.855795 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856038 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856170 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856517 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856609 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.856778 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857013 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857061 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857215 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857441 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857586 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857589 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857673 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857700 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857724 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857746 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857764 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857784 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857806 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857840 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857866 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857948 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.857975 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858006 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858008 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858030 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858051 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858074 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858128 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858151 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858182 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858212 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858237 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858264 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858282 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858286 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858342 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858367 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858386 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858404 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858421 4911 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858443 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858489 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858500 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858520 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858540 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858559 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858583 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858602 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858620 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" 
(UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858636 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858659 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858678 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858696 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858714 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858735 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858757 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858768 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858780 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858807 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858835 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858863 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858883 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858916 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858933 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858949 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858965 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.858980 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859001 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859020 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859037 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859056 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859073 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859104 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859125 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859141 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859162 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 
09:13:31.859169 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859180 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859198 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859235 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859253 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859271 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859287 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859305 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859320 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859336 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: 
\"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859347 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859352 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859387 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859407 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859425 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859444 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859461 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859507 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859525 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859541 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859541 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859556 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859576 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859595 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859615 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859647 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859663 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859663 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859679 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859701 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859706 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859745 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859769 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859785 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859802 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859828 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859846 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859861 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859877 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859886 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859894 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859921 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859945 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859966 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.859991 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860017 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860034 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860052 4911 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860068 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860077 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860085 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860119 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860136 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860154 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860174 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860192 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860208 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: 
\"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860223 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860239 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860256 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860274 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860291 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860309 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860325 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860344 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860361 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860380 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod 
\"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860395 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860411 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860428 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860445 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860461 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860478 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860494 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860511 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860528 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860545 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860561 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860578 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860595 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860613 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860630 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860647 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860670 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860686 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860703 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860703 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860726 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860756 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860782 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860806 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860832 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860854 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860877 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860901 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860924 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jun 06 09:13:31 crc 
kubenswrapper[4911]: I0606 09:13:31.860923 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860948 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860952 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.860997 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861022 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861044 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861064 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861087 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861147 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861170 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" 
(OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861175 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861289 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861342 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861391 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861400 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861436 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861473 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861504 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861518 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861544 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861587 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861620 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861647 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861674 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861695 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861697 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861752 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861778 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861780 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861804 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861829 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861925 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861938 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861949 4911 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861961 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861971 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861981 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861982 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.861991 4911 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862004 4911 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862016 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862026 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862037 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862048 4911 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862057 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862060 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862110 4911 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862122 4911 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862136 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862148 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862190 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862194 4911 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862241 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862256 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862272 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862286 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862297 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862309 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862319 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862331 4911 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862342 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862353 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862363 4911 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862374 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862385 4911 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862397 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862408 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862421 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862433 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862442 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862452 4911 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862462 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862475 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862485 4911 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862496 4911 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862508 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862518 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862530 4911 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862542 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862554 4911 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862565 4911 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862576 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862482 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862662 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862735 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862826 4911 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862951 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862961 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.862982 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.863220 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.863270 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.863866 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.863922 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.864116 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.864257 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.864438 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.864515 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.864677 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.864808 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.864917 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.865150 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.865200 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.865586 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.865943 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.866492 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.866807 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.867079 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.867564 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.867901 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.868252 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.868325 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.868346 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.868368 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.868416 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.868666 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.868726 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.869003 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.869039 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.869066 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.869477 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.869494 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.869719 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.873450 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.874040 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.874327 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.874886 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.875239 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.875992 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.876081 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.876368 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.876675 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.877007 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.877107 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.877415 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.877419 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.877429 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.877805 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.877853 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.877898 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.878236 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.878513 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.878603 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:32.378556409 +0000 UTC m=+23.653981962 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.878676 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.878963 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.879159 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.879254 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.879349 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:32.379336506 +0000 UTC m=+23.654762049 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.880779 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.880805 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.881320 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.881527 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.881948 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.882126 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.882149 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.882572 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.882790 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.882852 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.882210 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.883243 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.882165 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.882220 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.883539 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.883588 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.883778 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.883829 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.884139 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.884190 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.884287 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.884644 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.884652 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.884750 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.884790 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.884914 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.885163 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.885644 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.885743 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.886017 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.886200 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.886543 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.886677 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.886981 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.887104 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). 
InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.887827 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.888214 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.888416 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.888669 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.888943 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.889513 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.890876 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.891517 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.892435 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.892507 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.892654 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.892681 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.892696 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.892805 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:32.39277523 +0000 UTC m=+23.668200953 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.892816 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.892859 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.892878 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:31 crc kubenswrapper[4911]: E0606 09:13:31.892959 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:32.392931303 +0000 UTC m=+23.668357056 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.893502 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.895655 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.896155 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.896209 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.897153 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.897297 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.897567 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.897882 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.897922 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.897569 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.898032 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.898111 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.899317 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.902350 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.903623 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.903831 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.904295 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.905404 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.905430 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.905431 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.905428 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.905503 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.905662 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.906129 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.906047 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.906183 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.906162 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.906425 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.906534 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.906451 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.906452 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.906757 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.906792 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.906882 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.907747 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.908336 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.910020 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.910331 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.923631 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.936205 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.950767 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.951687 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.952120 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.952828 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.954211 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.954824 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.956462 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.957805 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.958890 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964085 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964205 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964298 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964315 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964327 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964339 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: 
\"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964355 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964366 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964378 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964389 4911 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964403 4911 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964413 4911 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964425 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964436 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964450 4911 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964461 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964472 4911 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964488 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964499 4911 reconciler_common.go:293] "Volume detached for volume 
\"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964510 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964520 4911 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964539 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964550 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964561 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964575 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964590 4911 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964600 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964624 4911 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964634 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964649 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964661 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964671 4911 reconciler_common.go:293] 
"Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964684 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964695 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.964706 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.966608 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.966851 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.966967 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.967044 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.967139 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.967226 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.967347 4911 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.967478 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc 
kubenswrapper[4911]: I0606 09:13:31.967573 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.967745 4911 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.967824 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.967918 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.968447 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.968530 4911 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.968602 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.968686 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.968821 4911 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.968893 4911 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.968949 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.969009 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.969081 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.969209 4911 
reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.969321 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.969398 4911 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.969461 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.969568 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.969632 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.969705 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.969795 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.967487 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.969869 4911 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970163 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970200 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970221 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970238 4911 reconciler_common.go:293] "Volume detached for volume 
\"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970254 4911 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970267 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970281 4911 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970295 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970309 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970323 4911 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970337 4911 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970351 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970365 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970385 4911 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970397 4911 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970410 4911 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970424 4911 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970441 4911 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970456 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970469 4911 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970483 4911 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970497 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970509 4911 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970522 4911 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970535 4911 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970548 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970564 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970578 4911 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970591 4911 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970604 4911 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970735 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970751 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970765 4911 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970780 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970795 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970812 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970828 4911 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970847 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970860 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970872 4911 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970885 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970896 4911 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970905 4911 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970914 4911 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970930 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970962 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970972 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970981 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.970991 4911 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971000 4911 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971010 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971019 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971028 4911 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971038 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971050 4911 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971059 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: 
\"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971069 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971080 4911 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971108 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971117 4911 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971128 4911 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971137 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971149 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971182 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971191 4911 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971200 4911 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971210 4911 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971220 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971229 4911 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971240 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971249 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971258 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971267 4911 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971278 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971287 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971296 4911 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971305 4911 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971313 4911 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971324 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971333 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971342 4911 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971351 4911 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971362 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971371 4911 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971381 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971391 4911 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.971789 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.972914 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.973768 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.974777 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.975432 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.976519 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.977058 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.978261 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.978752 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.979497 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.980859 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.981721 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.982423 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.983919 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.984749 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.986236 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.987343 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.988862 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.989421 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.990411 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.991036 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.991546 4911 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.991688 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.993953 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.994474 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.994931 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.996813 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.997860 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.998575 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jun 06 09:13:31 crc kubenswrapper[4911]: I0606 09:13:31.999652 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.000324 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.001162 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.001809 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.002801 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.004198 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.004717 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.005630 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.006134 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" 
path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.007405 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.007919 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.008550 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.009643 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.010201 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.010815 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.011830 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.069294 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.082070 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.082849 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.083480 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.085901 4911 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f" exitCode=255 Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.085961 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f"} Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.086014 4911 scope.go:117] "RemoveContainer" containerID="1900a48434ecd45580b3f2e3490b9c84d0250cd414e4e1b1ad14d7c87d345756" Jun 06 09:13:32 crc kubenswrapper[4911]: W0606 09:13:32.090002 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-fef9cf134b7a4746d398ff1a1e43713e983ee9111ec16700e2208e3d9468796f WatchSource:0}: Error finding container fef9cf134b7a4746d398ff1a1e43713e983ee9111ec16700e2208e3d9468796f: Status 404 returned error can't find the container with id fef9cf134b7a4746d398ff1a1e43713e983ee9111ec16700e2208e3d9468796f Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.094726 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.101830 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.119054 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.134436 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.153387 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.165263 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.188330 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.228018 4911 scope.go:117] "RemoveContainer" containerID="1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f" Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.228259 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.228386 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.255884 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-sfx54"] Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.256221 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-sfx54" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.258781 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.259137 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.259222 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.272353 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.289313 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.304287 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.316431 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.329380 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.340430 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sfx54" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d2d35b2-3290-405c-9a41-4c79790f59a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csnh7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sfx54\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 
06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.368585 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9441857-8545-4ce8-8e2e-e20e7211ef26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b3ade80b67b75dc3f26a44ebadcbfaacea0e49c4f3aeac569f467a441b42716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b702e65cf65dbec8da9125a823c877a6bfaf8745b0e4292dc2a4f886b2e9203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3fc66d21cb1eba9814737fce0deeb40ace097774c06f788f2c4c79a321225\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1900a48434ecd45580b3f2e3490b9c84d0250cd414e4e1b1ad14d7c87d345756\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-06-06T09:13:24Z\\\",\\\"message\\\":\\\"W0606 09:13:14.191301 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0606 09:13:14.191627 1 crypto.go:601] Generating new CA for check-endpoints-signer@1749201194 cert, and key in /tmp/serving-cert-2276917403/serving-signer.crt, /tmp/serving-cert-2276917403/serving-signer.key\\\\nI0606 09:13:14.608669 1 observer_polling.go:159] Starting file observer\\\\nW0606 09:13:14.611058 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0606 09:13:14.611253 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0606 09:13:14.611898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2276917403/tls.crt::/tmp/serving-cert-2276917403/tls.key\\\\\\\"\\\\nF0606 09:13:24.866446 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-06-06T09:13:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"le observer\\\\nW0606 09:13:30.974603 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0606 09:13:30.974758 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0606 09:13:30.975432 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1640578364/tls.crt::/tmp/serving-cert-1640578364/tls.key\\\\\\\"\\\\nI0606 09:13:31.485046 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0606 09:13:31.673368 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0606 09:13:31.673396 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" 
len=200\\\\nI0606 09:13:31.673423 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0606 09:13:31.673428 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0606 09:13:31.756334 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0606 09:13:31.756368 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0606 09:13:31.756373 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0606 09:13:31.756378 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0606 09:13:31.756382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0606 09:13:31.756385 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0606 09:13:31.756387 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0606 09:13:31.757505 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0606 09:13:31.763372 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-06-06T09:13:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b5bb96a7312069f50f9bed423e78da3db154b4e2203a83b674bd662dab8a5e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f69d826d928a887e6b947f0319ee3db6cdf38257750465cd5e1f221cb7c00bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f69d826d928a887e6b947f0319ee3db6cdf38257750465cd5e1f221cb7c00bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-06-06T09:13:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-06-06T09:13:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: 
Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.373561 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.373813 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:33.373768798 +0000 UTC m=+24.649194371 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.373943 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/5d2d35b2-3290-405c-9a41-4c79790f59a2-hosts-file\") pod \"node-resolver-sfx54\" (UID: \"5d2d35b2-3290-405c-9a41-4c79790f59a2\") " pod="openshift-dns/node-resolver-sfx54" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.373987 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csnh7\" (UniqueName: \"kubernetes.io/projected/5d2d35b2-3290-405c-9a41-4c79790f59a2-kube-api-access-csnh7\") pod \"node-resolver-sfx54\" (UID: \"5d2d35b2-3290-405c-9a41-4c79790f59a2\") " pod="openshift-dns/node-resolver-sfx54" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.383178 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when 
the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.475491 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.475556 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/5d2d35b2-3290-405c-9a41-4c79790f59a2-hosts-file\") pod \"node-resolver-sfx54\" (UID: \"5d2d35b2-3290-405c-9a41-4c79790f59a2\") " pod="openshift-dns/node-resolver-sfx54" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.475589 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csnh7\" (UniqueName: \"kubernetes.io/projected/5d2d35b2-3290-405c-9a41-4c79790f59a2-kube-api-access-csnh7\") pod \"node-resolver-sfx54\" (UID: \"5d2d35b2-3290-405c-9a41-4c79790f59a2\") " pod="openshift-dns/node-resolver-sfx54" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.475617 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod 
\"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.475647 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.475682 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.475886 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.475911 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.475926 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.475989 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:33.475967475 +0000 UTC m=+24.751393018 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.476469 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.476516 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:33.476505357 +0000 UTC m=+24.751930900 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.476578 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/5d2d35b2-3290-405c-9a41-4c79790f59a2-hosts-file\") pod \"node-resolver-sfx54\" (UID: \"5d2d35b2-3290-405c-9a41-4c79790f59a2\") " pod="openshift-dns/node-resolver-sfx54" Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.476854 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.476881 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.476892 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.476924 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:33.476914616 +0000 UTC m=+24.752340169 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.476968 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.476998 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:33.476989488 +0000 UTC m=+24.752415031 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.496884 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csnh7\" (UniqueName: \"kubernetes.io/projected/5d2d35b2-3290-405c-9a41-4c79790f59a2-kube-api-access-csnh7\") pod \"node-resolver-sfx54\" (UID: \"5d2d35b2-3290-405c-9a41-4c79790f59a2\") " pod="openshift-dns/node-resolver-sfx54" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.570894 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-sfx54" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.637617 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-sz44k"] Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.638131 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-fdfc2"] Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.638485 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.638907 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gj94b"] Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.638935 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.640078 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.640484 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.643439 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-24k59"] Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.643523 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.643697 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.643836 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.644123 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.644170 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.644333 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.644329 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.644372 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.645581 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.645602 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.645873 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.646420 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.647181 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.647615 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.647790 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.647886 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.649628 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.649900 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.650024 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.659476 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.678323 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.691283 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.700924 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sfx54" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d2d35b2-3290-405c-9a41-4c79790f59a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csnh7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sfx54\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.713935 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fdfc2" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsr8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fdfc2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.730652 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9441857-8545-4ce8-8e2e-e20e7211ef26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b3ade80b67b75dc3f26a44ebadcbfaacea0e49c4f3aeac569f467a441b42716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b702e65cf65dbec8da9125a823c877a6bfaf8745b0e4292dc2a4f886b2e9203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3fc66d21cb1eba9814737fce0deeb40ace097774c06f788f2c4c79a321225\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f89
45c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1900a48434ecd45580b3f2e3490b9c84d0250cd414e4e1b1ad14d7c87d345756\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-06-06T09:13:24Z\\\",\\\"message\\\":\\\"W0606 09:13:14.191301 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0606 09:13:14.191627 1 crypto.go:601] Generating new CA for check-endpoints-signer@1749201194 cert, and key in /tmp/serving-cert-2276917403/serving-signer.crt, /tmp/serving-cert-2276917403/serving-signer.key\\\\nI0606 09:13:14.608669 1 observer_polling.go:159] Starting file observer\\\\nW0606 09:13:14.611058 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0606 09:13:14.611253 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0606 09:13:14.611898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2276917403/tls.crt::/tmp/serving-cert-2276917403/tls.key\\\\\\\"\\\\nF0606 09:13:24.866446 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-06-06T09:13:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"le observer\\\\nW0606 09:13:30.974603 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0606 09:13:30.974758 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0606 09:13:30.975432 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1640578364/tls.crt::/tmp/serving-cert-1640578364/tls.key\\\\\\\"\\\\nI0606 09:13:31.485046 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0606 09:13:31.673368 1 
maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0606 09:13:31.673396 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0606 09:13:31.673423 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0606 09:13:31.673428 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0606 09:13:31.756334 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0606 09:13:31.756368 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0606 09:13:31.756373 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0606 09:13:31.756378 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0606 09:13:31.756382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0606 09:13:31.756385 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0606 09:13:31.756387 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0606 09:13:31.757505 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0606 09:13:31.763372 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-06-06T09:13:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b5bb96a7312069f50f9bed423e78da3db154b4e2203a83b674bd662dab8a5e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f69d826d928a887e6b947f0319ee3db6cdf38257750465cd5e1f221cb7c00bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f69d826d928a887e6b947f0319ee3db6cdf38257750465cd5e1f221cb7c00bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-06-06T09:13:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-06-06T09:13:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:10Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.743616 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.759397 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.770810 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783329 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-socket-dir-parent\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783421 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovn-node-metrics-cert\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783456 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gszlb\" (UniqueName: \"kubernetes.io/projected/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-kube-api-access-gszlb\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783487 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-tuning-conf-dir\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783530 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-system-cni-dir\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783559 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-bin\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783587 4911 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-run-netns\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783653 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-var-lib-kubelet\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783687 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-run-k8s-cni-cncf-io\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783714 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-systemd-units\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783740 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-netns\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783771 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-ovn\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783827 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/524ab803-c5fe-443c-8a85-b3f0a34b8a55-proxy-tls\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783933 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/524ab803-c5fe-443c-8a85-b3f0a34b8a55-mcd-auth-proxy-config\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.783986 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-cni-binary-copy\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" 
Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784019 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784121 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/524ab803-c5fe-443c-8a85-b3f0a34b8a55-rootfs\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784169 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-node-log\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784210 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-kubelet\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784238 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-ovn-kubernetes\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784267 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-cni-dir\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784339 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-conf-dir\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784373 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-config\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784403 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-env-overrides\") pod \"ovnkube-node-gj94b\" 
(UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784429 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-hostroot\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784454 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-daemon-config\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784479 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-openvswitch\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784503 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-script-lib\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.784531 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-cni-binary-copy\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.785841 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-os-release\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.785901 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-cnibin\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.785942 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bhlq\" (UniqueName: \"kubernetes.io/projected/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-kube-api-access-9bhlq\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.785985 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-system-cni-dir\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786014 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgt5c\" (UniqueName: \"kubernetes.io/projected/524ab803-c5fe-443c-8a85-b3f0a34b8a55-kube-api-access-mgt5c\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786045 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-cnibin\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786079 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-var-lib-cni-bin\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786138 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-log-socket\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786169 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-var-lib-openvswitch\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786197 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-run-multus-certs\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786227 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-etc-kubernetes\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786278 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsr8h\" (UniqueName: \"kubernetes.io/projected/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-kube-api-access-hsr8h\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786311 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-os-release\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786335 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-systemd\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786364 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-netd\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786393 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-etc-openvswitch\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786426 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786489 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-var-lib-cni-multus\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.786534 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-slash\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.787408 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.804600 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bhlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bhlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bhlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9bhlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bhlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bhlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bhlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bhlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9bhlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gj94b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.818904 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9441857-8545-4ce8-8e2e-e20e7211ef26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b3ade80b67b75dc3f26a44ebadcbfaacea0e49c4f3aeac569f467a441b42716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b702e65cf65dbec8da9125a823c877a6bfaf8745b0e4292dc2a4f886b2e9203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46a3fc66d21cb1eba9814737fce0deeb40ace097774c06f788f2c4c79a321225\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1900a48434ecd45580b3f2e3490b9c84d0250cd414e4e1b1ad14d7c87d345756\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-06-06T09:13:24Z\\\",\\\"message\\\":\\\"W0606 09:13:14.191301 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0606 09:13:14.191627 1 crypto.go:601] Generating new CA for check-endpoints-signer@1749201194 cert, and key in /tmp/serving-cert-2276917403/serving-signer.crt, /tmp/serving-cert-2276917403/serving-signer.key\\\\nI0606 09:13:14.608669 1 observer_polling.go:159] Starting file observer\\\\nW0606 09:13:14.611058 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0606 09:13:14.611253 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0606 09:13:14.611898 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2276917403/tls.crt::/tmp/serving-cert-2276917403/tls.key\\\\\\\"\\\\nF0606 09:13:24.866446 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-06-06T09:13:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"le observer\\\\nW0606 09:13:30.974603 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0606 09:13:30.974758 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0606 09:13:30.975432 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1640578364/tls.crt::/tmp/serving-cert-1640578364/tls.key\\\\\\\"\\\\nI0606 09:13:31.485046 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0606 09:13:31.673368 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0606 09:13:31.673396 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0606 09:13:31.673423 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0606 09:13:31.673428 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating 
requests\\\\\\\" limit=200\\\\nI0606 09:13:31.756334 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0606 09:13:31.756368 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0606 09:13:31.756373 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0606 09:13:31.756378 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0606 09:13:31.756382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0606 09:13:31.756385 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0606 09:13:31.756387 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0606 09:13:31.757505 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0606 09:13:31.763372 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-06-06T09:13:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b5bb96a7312069f50f9bed423e78da3db154b4e2203a83b674bd662dab8a5e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-06-06T09:13:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f69d826d928a887e6b947f0319ee3db6cdf38257750465cd5e1f221cb7c00bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f69d826d928a887e6b947f0319ee3db6cdf38257750465cd5e1f221cb7c00bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-06-06T09:13:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-06-06T09:13:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.829810 4911 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.840365 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.853066 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-fdfc2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsr8h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-fdfc2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.865681 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.883960 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887543 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-config\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887590 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-env-overrides\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887613 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-hostroot\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887636 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-conf-dir\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887652 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-daemon-config\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887668 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-openvswitch\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887685 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-script-lib\") pod \"ovnkube-node-gj94b\" (UID: 
\"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887703 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-cni-binary-copy\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887722 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-os-release\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887737 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-cnibin\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887754 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bhlq\" (UniqueName: \"kubernetes.io/projected/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-kube-api-access-9bhlq\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887771 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-system-cni-dir\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887789 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgt5c\" (UniqueName: \"kubernetes.io/projected/524ab803-c5fe-443c-8a85-b3f0a34b8a55-kube-api-access-mgt5c\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887806 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-cnibin\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887825 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-var-lib-cni-bin\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887847 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-var-lib-openvswitch\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") 
" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887883 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-log-socket\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887907 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-etc-kubernetes\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887880 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-hostroot\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887931 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsr8h\" (UniqueName: \"kubernetes.io/projected/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-kube-api-access-hsr8h\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887968 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-run-multus-certs\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887961 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-openvswitch\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887980 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-system-cni-dir\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.887995 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-systemd\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888059 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-systemd\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888059 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-var-lib-cni-bin\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888120 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-cnibin\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888132 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-etc-kubernetes\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888145 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-netd\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888192 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-run-multus-certs\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888262 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-netd\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888274 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-log-socket\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888292 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-os-release\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888329 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-cnibin\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888357 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-os-release\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " 
pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888381 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888363 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-var-lib-openvswitch\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888462 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-os-release\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888554 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-var-lib-cni-multus\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888622 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-var-lib-cni-multus\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888653 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-slash\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888659 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-env-overrides\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888684 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-etc-openvswitch\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888721 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-slash\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 
09:13:32.888748 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-socket-dir-parent\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888785 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovn-node-metrics-cert\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888749 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-etc-openvswitch\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888825 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gszlb\" (UniqueName: \"kubernetes.io/projected/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-kube-api-access-gszlb\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888884 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-system-cni-dir\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888915 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-bin\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888938 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-tuning-conf-dir\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888951 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-socket-dir-parent\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888963 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-run-netns\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.888993 4911 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-run-netns\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889003 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-var-lib-kubelet\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889033 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-systemd-units\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889043 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-system-cni-dir\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889061 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-netns\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889082 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-bin\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889108 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-ovn\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889139 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-ovn\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889154 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/524ab803-c5fe-443c-8a85-b3f0a34b8a55-proxy-tls\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889176 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-var-lib-kubelet\") pod \"multus-fdfc2\" (UID: 
\"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889202 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-daemon-config\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889264 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-config\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889304 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-netns\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889342 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889354 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-systemd-units\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889178 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/524ab803-c5fe-443c-8a85-b3f0a34b8a55-mcd-auth-proxy-config\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889399 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-cni-binary-copy\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889395 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-cni-binary-copy\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889428 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-run-k8s-cni-cncf-io\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " 
pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889459 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-host-run-k8s-cni-cncf-io\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889493 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889561 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/524ab803-c5fe-443c-8a85-b3f0a34b8a55-rootfs\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889586 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-node-log\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889628 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-ovn-kubernetes\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889656 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-cni-dir\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889676 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-kubelet\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.890189 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/524ab803-c5fe-443c-8a85-b3f0a34b8a55-mcd-auth-proxy-config\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889828 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-node-log\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 
09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889859 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889884 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-tuning-conf-dir\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889893 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/524ab803-c5fe-443c-8a85-b3f0a34b8a55-rootfs\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889929 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-ovn-kubernetes\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889927 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-conf-dir\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.890022 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-script-lib\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.890066 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-cni-binary-copy\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.890135 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-multus-cni-dir\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.889963 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-kubelet\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.893522 4911 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovn-node-metrics-cert\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.898165 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/524ab803-c5fe-443c-8a85-b3f0a34b8a55-proxy-tls\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.899389 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-24k59" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-gszlb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-24k59\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.907748 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bhlq\" (UniqueName: \"kubernetes.io/projected/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-kube-api-access-9bhlq\") pod \"ovnkube-node-gj94b\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.909002 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-mgt5c\" (UniqueName: \"kubernetes.io/projected/524ab803-c5fe-443c-8a85-b3f0a34b8a55-kube-api-access-mgt5c\") pod \"machine-config-daemon-sz44k\" (UID: \"524ab803-c5fe-443c-8a85-b3f0a34b8a55\") " pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.909030 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gszlb\" (UniqueName: \"kubernetes.io/projected/d9640d08-8c4d-4ee9-b4b8-be68e62b7547-kube-api-access-gszlb\") pod \"multus-additional-cni-plugins-24k59\" (UID: \"d9640d08-8c4d-4ee9-b4b8-be68e62b7547\") " pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.910901 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsr8h\" (UniqueName: \"kubernetes.io/projected/962d1f6e-6277-4a04-ad9d-199d3f9f7e72-kube-api-access-hsr8h\") pod \"multus-fdfc2\" (UID: \"962d1f6e-6277-4a04-ad9d-199d3f9f7e72\") " pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.912703 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.922626 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sfx54" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d2d35b2-3290-405c-9a41-4c79790f59a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csnh7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sfx54\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: 
connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.934301 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"524ab803-c5fe-443c-8a85-b3f0a34b8a55\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgt5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgt5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sz44k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.947885 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:32 crc kubenswrapper[4911]: E0606 09:13:32.948063 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.951949 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-fdfc2" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.961004 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-24k59" Jun 06 09:13:32 crc kubenswrapper[4911]: W0606 09:13:32.965930 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod962d1f6e_6277_4a04_ad9d_199d3f9f7e72.slice/crio-6021ec1cb0a35e279cbb299b3002e0bf08511e59c674e13edeac76bc80cf6e7f WatchSource:0}: Error finding container 6021ec1cb0a35e279cbb299b3002e0bf08511e59c674e13edeac76bc80cf6e7f: Status 404 returned error can't find the container with id 6021ec1cb0a35e279cbb299b3002e0bf08511e59c674e13edeac76bc80cf6e7f Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.969354 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:13:32 crc kubenswrapper[4911]: I0606 09:13:32.974600 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:32 crc kubenswrapper[4911]: W0606 09:13:32.976810 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9640d08_8c4d_4ee9_b4b8_be68e62b7547.slice/crio-e8652c7abb0351c16300a9a64e0318d0e16b8814b8cef7d747a3a14457a4ca6b WatchSource:0}: Error finding container e8652c7abb0351c16300a9a64e0318d0e16b8814b8cef7d747a3a14457a4ca6b: Status 404 returned error can't find the container with id e8652c7abb0351c16300a9a64e0318d0e16b8814b8cef7d747a3a14457a4ca6b Jun 06 09:13:32 crc kubenswrapper[4911]: W0606 09:13:32.991355 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod524ab803_c5fe_443c_8a85_b3f0a34b8a55.slice/crio-0756352b2315f90a83415f4e2eaf2feb6d426acd8d330faf0314aa37728864d1 WatchSource:0}: Error finding container 0756352b2315f90a83415f4e2eaf2feb6d426acd8d330faf0314aa37728864d1: Status 404 returned error can't find the container with id 0756352b2315f90a83415f4e2eaf2feb6d426acd8d330faf0314aa37728864d1 Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.095072 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.106196 4911 scope.go:117] "RemoveContainer" containerID="1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f" Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.106397 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.108311 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-fdfc2" event={"ID":"962d1f6e-6277-4a04-ad9d-199d3f9f7e72","Type":"ContainerStarted","Data":"6021ec1cb0a35e279cbb299b3002e0bf08511e59c674e13edeac76bc80cf6e7f"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.124333 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d78da04ba83a5ba3863a4ac8d3915ddea4f5b588345f10588b54cd65e6f22936"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.134411 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.146034 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sfx54" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d2d35b2-3290-405c-9a41-4c79790f59a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-csnh7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:32Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sfx54\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.162719 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"524ab803-c5fe-443c-8a85-b3f0a34b8a55\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgt5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mgt5c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-06-06T09:13:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sz44k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.164425 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9ddcbc95e3af17210d7662f9eb2c0c8295d72f13b1b7f918ab5a7612b27eddf0"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.164499 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"0540708ff7146c6f8872fb37ceb19287ab268f6e49b19c3fe014106129ebbf3d"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.164520 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"805c368cb2a1c134df8a3f0f90ddc3c9271fc7768e86aaa017fedf964fa2cf2c"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.169524 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" 
event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerStarted","Data":"418c72e49ceb1a465238316e949276dcafcfc97f1ef8a9dd520410afbf57c879"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.170754 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-24k59" event={"ID":"d9640d08-8c4d-4ee9-b4b8-be68e62b7547","Type":"ContainerStarted","Data":"e8652c7abb0351c16300a9a64e0318d0e16b8814b8cef7d747a3a14457a4ca6b"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.175161 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4b2eb6cec0b0e491f241bc793d3e288e0f0fef397eb86b0bc8cb0f710b3ffe65"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.175221 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"fef9cf134b7a4746d398ff1a1e43713e983ee9111ec16700e2208e3d9468796f"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.176686 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"0756352b2315f90a83415f4e2eaf2feb6d426acd8d330faf0314aa37728864d1"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.177562 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-sfx54" event={"ID":"5d2d35b2-3290-405c-9a41-4c79790f59a2","Type":"ContainerStarted","Data":"29df3a814a70019132e468550825976adf5895bda9c2ac7015888b3e69ba18fb"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.177599 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-sfx54" event={"ID":"5d2d35b2-3290-405c-9a41-4c79790f59a2","Type":"ContainerStarted","Data":"a2e9addfa55cf64976a9466a757fdc360f4cba2b1f7e05355e618f6f519b27ed"} Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.214935 4911 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-06-06T09:13:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.387049 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-slmml"] Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.387540 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-slmml" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.389782 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.390670 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.391185 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.394375 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.405295 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.405519 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:35.405482129 +0000 UTC m=+26.680907662 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.498968 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-sfx54" podStartSLOduration=2.498943679 podStartE2EDuration="2.498943679s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:33.4985541 +0000 UTC m=+24.773979663" watchObservedRunningTime="2025-06-06 09:13:33.498943679 +0000 UTC m=+24.774369222" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.506569 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.506629 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.506672 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a124ffd1-166b-4ccf-856b-006dc1937b15-serviceca\") pod \"node-ca-slmml\" (UID: \"a124ffd1-166b-4ccf-856b-006dc1937b15\") " pod="openshift-image-registry/node-ca-slmml" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.506703 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.506734 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.506763 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a124ffd1-166b-4ccf-856b-006dc1937b15-host\") pod \"node-ca-slmml\" (UID: \"a124ffd1-166b-4ccf-856b-006dc1937b15\") " pod="openshift-image-registry/node-ca-slmml" Jun 06 09:13:33 crc kubenswrapper[4911]: 
E0606 09:13:33.506821 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.506888 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.506942 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.506944 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.506959 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.506906 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:35.506889698 +0000 UTC m=+26.782315241 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.506989 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.507016 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.507034 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.507040 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4mrf\" (UniqueName: \"kubernetes.io/projected/a124ffd1-166b-4ccf-856b-006dc1937b15-kube-api-access-c4mrf\") pod \"node-ca-slmml\" (UID: \"a124ffd1-166b-4ccf-856b-006dc1937b15\") " pod="openshift-image-registry/node-ca-slmml" Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.507062 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 
nodeName:}" failed. No retries permitted until 2025-06-06 09:13:35.507032711 +0000 UTC m=+26.782458444 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.507141 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:35.507128843 +0000 UTC m=+26.782554566 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.507162 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:35.507153444 +0000 UTC m=+26.782579197 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.608514 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a124ffd1-166b-4ccf-856b-006dc1937b15-serviceca\") pod \"node-ca-slmml\" (UID: \"a124ffd1-166b-4ccf-856b-006dc1937b15\") " pod="openshift-image-registry/node-ca-slmml" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.608667 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a124ffd1-166b-4ccf-856b-006dc1937b15-host\") pod \"node-ca-slmml\" (UID: \"a124ffd1-166b-4ccf-856b-006dc1937b15\") " pod="openshift-image-registry/node-ca-slmml" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.608698 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4mrf\" (UniqueName: \"kubernetes.io/projected/a124ffd1-166b-4ccf-856b-006dc1937b15-kube-api-access-c4mrf\") pod \"node-ca-slmml\" (UID: \"a124ffd1-166b-4ccf-856b-006dc1937b15\") " pod="openshift-image-registry/node-ca-slmml" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.608828 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a124ffd1-166b-4ccf-856b-006dc1937b15-host\") pod \"node-ca-slmml\" (UID: \"a124ffd1-166b-4ccf-856b-006dc1937b15\") " pod="openshift-image-registry/node-ca-slmml" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.610019 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a124ffd1-166b-4ccf-856b-006dc1937b15-serviceca\") pod \"node-ca-slmml\" (UID: \"a124ffd1-166b-4ccf-856b-006dc1937b15\") " pod="openshift-image-registry/node-ca-slmml" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.631037 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4mrf\" (UniqueName: \"kubernetes.io/projected/a124ffd1-166b-4ccf-856b-006dc1937b15-kube-api-access-c4mrf\") pod \"node-ca-slmml\" (UID: \"a124ffd1-166b-4ccf-856b-006dc1937b15\") " pod="openshift-image-registry/node-ca-slmml" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.702588 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-slmml" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.726371 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2"] Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.727036 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.728700 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.728964 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.752262 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-w544n"] Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.752763 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.752840 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-w544n" podUID="ede76ff2-387d-4778-b8f5-0dbbc5cf5c35" Jun 06 09:13:33 crc kubenswrapper[4911]: W0606 09:13:33.783200 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda124ffd1_166b_4ccf_856b_006dc1937b15.slice/crio-cc6c7d8f6ba1d13843f60d1ce92fadee4c260bb82989a5af79ed30a399b71896 WatchSource:0}: Error finding container cc6c7d8f6ba1d13843f60d1ce92fadee4c260bb82989a5af79ed30a399b71896: Status 404 returned error can't find the container with id cc6c7d8f6ba1d13843f60d1ce92fadee4c260bb82989a5af79ed30a399b71896 Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.912317 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e0035170-ca20-46c6-aae2-81e5adbaf909-env-overrides\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.912361 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e0035170-ca20-46c6-aae2-81e5adbaf909-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.912381 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7b2z\" (UniqueName: \"kubernetes.io/projected/e0035170-ca20-46c6-aae2-81e5adbaf909-kube-api-access-l7b2z\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.912490 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs\") pod \"network-metrics-daemon-w544n\" (UID: \"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35\") " pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.912539 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf9s4\" (UniqueName: \"kubernetes.io/projected/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-kube-api-access-lf9s4\") pod \"network-metrics-daemon-w544n\" (UID: \"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35\") " pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.912566 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e0035170-ca20-46c6-aae2-81e5adbaf909-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.946900 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:33 crc kubenswrapper[4911]: I0606 09:13:33.946936 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.947120 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jun 06 09:13:33 crc kubenswrapper[4911]: E0606 09:13:33.947239 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.014746 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e0035170-ca20-46c6-aae2-81e5adbaf909-env-overrides\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.014844 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e0035170-ca20-46c6-aae2-81e5adbaf909-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.014873 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7b2z\" (UniqueName: \"kubernetes.io/projected/e0035170-ca20-46c6-aae2-81e5adbaf909-kube-api-access-l7b2z\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.014937 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf9s4\" (UniqueName: \"kubernetes.io/projected/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-kube-api-access-lf9s4\") pod \"network-metrics-daemon-w544n\" (UID: \"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35\") " pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.014962 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs\") pod \"network-metrics-daemon-w544n\" (UID: \"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35\") " pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.014980 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/e0035170-ca20-46c6-aae2-81e5adbaf909-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.015603 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e0035170-ca20-46c6-aae2-81e5adbaf909-env-overrides\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:34 crc kubenswrapper[4911]: E0606 09:13:34.015732 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jun 06 09:13:34 crc kubenswrapper[4911]: E0606 09:13:34.015811 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs podName:ede76ff2-387d-4778-b8f5-0dbbc5cf5c35 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:34.515793445 +0000 UTC m=+25.791218988 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs") pod "network-metrics-daemon-w544n" (UID: "ede76ff2-387d-4778-b8f5-0dbbc5cf5c35") : object "openshift-multus"/"metrics-daemon-secret" not registered Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.015845 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e0035170-ca20-46c6-aae2-81e5adbaf909-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.020443 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e0035170-ca20-46c6-aae2-81e5adbaf909-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.033464 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf9s4\" (UniqueName: \"kubernetes.io/projected/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-kube-api-access-lf9s4\") pod \"network-metrics-daemon-w544n\" (UID: \"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35\") " pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.037421 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7b2z\" (UniqueName: \"kubernetes.io/projected/e0035170-ca20-46c6-aae2-81e5adbaf909-kube-api-access-l7b2z\") pod \"ovnkube-control-plane-749d76644c-j89f2\" (UID: \"e0035170-ca20-46c6-aae2-81e5adbaf909\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.090833 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" Jun 06 09:13:34 crc kubenswrapper[4911]: W0606 09:13:34.104840 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0035170_ca20_46c6_aae2_81e5adbaf909.slice/crio-746af82857b0705c1faacee9770d32653d0004feceb0f1f3bdae8b31f267f41d WatchSource:0}: Error finding container 746af82857b0705c1faacee9770d32653d0004feceb0f1f3bdae8b31f267f41d: Status 404 returned error can't find the container with id 746af82857b0705c1faacee9770d32653d0004feceb0f1f3bdae8b31f267f41d Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.187778 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" event={"ID":"e0035170-ca20-46c6-aae2-81e5adbaf909","Type":"ContainerStarted","Data":"746af82857b0705c1faacee9770d32653d0004feceb0f1f3bdae8b31f267f41d"} Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.191037 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-slmml" event={"ID":"a124ffd1-166b-4ccf-856b-006dc1937b15","Type":"ContainerStarted","Data":"fb3af518e51206961d99ab37f1ac83145305d785a9e930f484834fc59422b02d"} Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.191144 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-slmml" event={"ID":"a124ffd1-166b-4ccf-856b-006dc1937b15","Type":"ContainerStarted","Data":"cc6c7d8f6ba1d13843f60d1ce92fadee4c260bb82989a5af79ed30a399b71896"} Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.192822 4911 generic.go:334] "Generic (PLEG): container finished" podID="d9640d08-8c4d-4ee9-b4b8-be68e62b7547" containerID="090689a3dd702ccbb2cb2eb1d4946dc3551d471f7c151d45d77c0226bc3503a7" exitCode=0 Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.192908 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-24k59" event={"ID":"d9640d08-8c4d-4ee9-b4b8-be68e62b7547","Type":"ContainerDied","Data":"090689a3dd702ccbb2cb2eb1d4946dc3551d471f7c151d45d77c0226bc3503a7"} Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.197574 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"2e0885ed6e60e066f5e87ec50c74037552ee3ddb891c1f0a84af604100a0c1a3"} Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.197623 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"c837d3cdc3edeb58703b5c9ea9f4e1684698e0b2806ed5cdf89b0984d3907c7b"} Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.200238 4911 generic.go:334] "Generic (PLEG): container finished" podID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerID="4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885" exitCode=0 Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.200290 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerDied","Data":"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885"} Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.202651 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-fdfc2" event={"ID":"962d1f6e-6277-4a04-ad9d-199d3f9f7e72","Type":"ContainerStarted","Data":"fdb97be513aa3c68ba9fa14bf88ecddccdedefce0304f429656609ae37872dff"} Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.212503 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.226735 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.235733 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-slmml" podStartSLOduration=3.23570525 podStartE2EDuration="3.23570525s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:34.211472533 +0000 UTC m=+25.486898086" watchObservedRunningTime="2025-06-06 09:13:34.23570525 +0000 UTC m=+25.511130793" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.288874 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podStartSLOduration=3.288852779 podStartE2EDuration="3.288852779s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:34.259267571 +0000 UTC m=+25.534693114" watchObservedRunningTime="2025-06-06 09:13:34.288852779 +0000 UTC m=+25.564278322" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.302962 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.319800 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-fdfc2" podStartSLOduration=3.319760887 podStartE2EDuration="3.319760887s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:34.312756909 +0000 UTC m=+25.588182452" watchObservedRunningTime="2025-06-06 09:13:34.319760887 +0000 UTC m=+25.595186460" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.354286 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=0.354261976 podStartE2EDuration="354.261976ms" podCreationTimestamp="2025-06-06 09:13:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:34.350387128 +0000 UTC m=+25.625812681" watchObservedRunningTime="2025-06-06 09:13:34.354261976 +0000 UTC m=+25.629687519" Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.523829 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs\") pod \"network-metrics-daemon-w544n\" (UID: \"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35\") " pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:34 crc kubenswrapper[4911]: E0606 09:13:34.524039 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jun 06 09:13:34 crc 
kubenswrapper[4911]: E0606 09:13:34.524132 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs podName:ede76ff2-387d-4778-b8f5-0dbbc5cf5c35 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:35.52411333 +0000 UTC m=+26.799538873 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs") pod "network-metrics-daemon-w544n" (UID: "ede76ff2-387d-4778-b8f5-0dbbc5cf5c35") : object "openshift-multus"/"metrics-daemon-secret" not registered Jun 06 09:13:34 crc kubenswrapper[4911]: I0606 09:13:34.947231 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:34 crc kubenswrapper[4911]: E0606 09:13:34.947869 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.208469 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" event={"ID":"e0035170-ca20-46c6-aae2-81e5adbaf909","Type":"ContainerStarted","Data":"f0d01dbba9c57350d7786796e63008a7239ee27df8a8d7e282424ffc31546cc7"} Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.208697 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" event={"ID":"e0035170-ca20-46c6-aae2-81e5adbaf909","Type":"ContainerStarted","Data":"027a45e37436c371016a365a151038fae20b4c8666eb7a90eeb8cc0734105334"} Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.211296 4911 generic.go:334] "Generic (PLEG): container finished" podID="d9640d08-8c4d-4ee9-b4b8-be68e62b7547" containerID="1a646161a503d7a411420d31e7edba3ae67b8a70c0b4705cd870e04b4c9b9c90" exitCode=0 Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.211399 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-24k59" event={"ID":"d9640d08-8c4d-4ee9-b4b8-be68e62b7547","Type":"ContainerDied","Data":"1a646161a503d7a411420d31e7edba3ae67b8a70c0b4705cd870e04b4c9b9c90"} Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.214934 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerStarted","Data":"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6"} Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.214964 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerStarted","Data":"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a"} Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.214975 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" 
event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerStarted","Data":"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd"} Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.227730 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j89f2" podStartSLOduration=3.227709054 podStartE2EDuration="3.227709054s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:35.22665455 +0000 UTC m=+26.502080113" watchObservedRunningTime="2025-06-06 09:13:35.227709054 +0000 UTC m=+26.503134597" Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.435639 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.435912 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:39.435867063 +0000 UTC m=+30.711292606 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.537758 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.537827 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs\") pod \"network-metrics-daemon-w544n\" (UID: \"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35\") " pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.537856 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.537886 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod 
\"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.537912 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538020 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538083 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538020 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538147 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:39.538123411 +0000 UTC m=+30.813549124 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538044 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538247 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538249 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs podName:ede76ff2-387d-4778-b8f5-0dbbc5cf5c35 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:37.538221243 +0000 UTC m=+28.813646966 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs") pod "network-metrics-daemon-w544n" (UID: "ede76ff2-387d-4778-b8f5-0dbbc5cf5c35") : object "openshift-multus"/"metrics-daemon-secret" not registered Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538264 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538194 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538347 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:39.538318266 +0000 UTC m=+30.813743999 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538365 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538033 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538429 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:39.538418508 +0000 UTC m=+30.813844241 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.538459 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:39.538446789 +0000 UTC m=+30.813872332 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.947718 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.948233 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-w544n" podUID="ede76ff2-387d-4778-b8f5-0dbbc5cf5c35" Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.947853 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:35 crc kubenswrapper[4911]: I0606 09:13:35.947814 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.948432 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jun 06 09:13:35 crc kubenswrapper[4911]: E0606 09:13:35.948637 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jun 06 09:13:36 crc kubenswrapper[4911]: I0606 09:13:36.227142 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerStarted","Data":"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4"} Jun 06 09:13:36 crc kubenswrapper[4911]: I0606 09:13:36.227205 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerStarted","Data":"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0"} Jun 06 09:13:36 crc kubenswrapper[4911]: I0606 09:13:36.227219 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerStarted","Data":"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101"} Jun 06 09:13:36 crc kubenswrapper[4911]: I0606 09:13:36.232775 4911 generic.go:334] "Generic (PLEG): container finished" podID="d9640d08-8c4d-4ee9-b4b8-be68e62b7547" containerID="5b73b8f46e2d4c3277d5a73739445791c9028f463ebd202610ed3defca659f5e" exitCode=0 Jun 06 09:13:36 crc kubenswrapper[4911]: I0606 09:13:36.232842 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-24k59" event={"ID":"d9640d08-8c4d-4ee9-b4b8-be68e62b7547","Type":"ContainerDied","Data":"5b73b8f46e2d4c3277d5a73739445791c9028f463ebd202610ed3defca659f5e"} Jun 06 09:13:36 crc kubenswrapper[4911]: I0606 09:13:36.441555 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:36 crc kubenswrapper[4911]: I0606 09:13:36.445622 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:36 crc kubenswrapper[4911]: I0606 09:13:36.454263 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jun 06 09:13:36 crc kubenswrapper[4911]: I0606 09:13:36.857068 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:36 crc kubenswrapper[4911]: I0606 09:13:36.857937 4911 scope.go:117] "RemoveContainer" containerID="1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f" Jun 06 09:13:36 crc kubenswrapper[4911]: E0606 09:13:36.858127 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jun 06 09:13:36 crc kubenswrapper[4911]: I0606 09:13:36.947019 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:36 crc kubenswrapper[4911]: E0606 09:13:36.947181 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.238573 4911 generic.go:334] "Generic (PLEG): container finished" podID="d9640d08-8c4d-4ee9-b4b8-be68e62b7547" containerID="70cbb2cc710a17bb90ed945eaee8b1ca5f6e0e64e6ca52c4f6a21aaf1a7f47d5" exitCode=0 Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.238665 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-24k59" event={"ID":"d9640d08-8c4d-4ee9-b4b8-be68e62b7547","Type":"ContainerDied","Data":"70cbb2cc710a17bb90ed945eaee8b1ca5f6e0e64e6ca52c4f6a21aaf1a7f47d5"} Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.240456 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"578c510aea8cf4d2b98ff0c1caf8e1e771e7492fa59fa2980e8b8bae1d370610"} Jun 06 09:13:37 crc kubenswrapper[4911]: E0606 09:13:37.250018 4911 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.299814 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=1.29978646 podStartE2EDuration="1.29978646s" podCreationTimestamp="2025-06-06 09:13:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:37.283520743 +0000 UTC m=+28.558946296" watchObservedRunningTime="2025-06-06 09:13:37.29978646 +0000 UTC m=+28.575212003" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.464238 4911 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.466578 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.466628 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.466637 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.466750 4911 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.474145 4911 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.474516 4911 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.475637 4911 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.475675 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.475684 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.475699 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.475709 4911 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-06-06T09:13:37Z","lastTransitionTime":"2025-06-06T09:13:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.522656 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74"] Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.523410 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.525084 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.525561 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.525584 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.525799 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.571241 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs\") pod \"network-metrics-daemon-w544n\" (UID: \"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35\") " pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.571291 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/227358ee-53fa-4f78-9894-854b4d7253cf-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.571348 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/227358ee-53fa-4f78-9894-854b4d7253cf-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: E0606 09:13:37.571360 
4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jun 06 09:13:37 crc kubenswrapper[4911]: E0606 09:13:37.571448 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs podName:ede76ff2-387d-4778-b8f5-0dbbc5cf5c35 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:41.571424681 +0000 UTC m=+32.846850424 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs") pod "network-metrics-daemon-w544n" (UID: "ede76ff2-387d-4778-b8f5-0dbbc5cf5c35") : object "openshift-multus"/"metrics-daemon-secret" not registered Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.571368 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/227358ee-53fa-4f78-9894-854b4d7253cf-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.571525 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/227358ee-53fa-4f78-9894-854b4d7253cf-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.571546 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/227358ee-53fa-4f78-9894-854b4d7253cf-service-ca\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.672355 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/227358ee-53fa-4f78-9894-854b4d7253cf-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.672417 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/227358ee-53fa-4f78-9894-854b4d7253cf-service-ca\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.672473 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/227358ee-53fa-4f78-9894-854b4d7253cf-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.672485 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/227358ee-53fa-4f78-9894-854b4d7253cf-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.672698 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/227358ee-53fa-4f78-9894-854b4d7253cf-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.672725 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/227358ee-53fa-4f78-9894-854b4d7253cf-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.672787 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/227358ee-53fa-4f78-9894-854b4d7253cf-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.673448 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/227358ee-53fa-4f78-9894-854b4d7253cf-service-ca\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.681402 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/227358ee-53fa-4f78-9894-854b4d7253cf-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.689885 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/227358ee-53fa-4f78-9894-854b4d7253cf-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-h8f74\" (UID: \"227358ee-53fa-4f78-9894-854b4d7253cf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.840240 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.947568 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.947703 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:37 crc kubenswrapper[4911]: E0606 09:13:37.948042 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jun 06 09:13:37 crc kubenswrapper[4911]: I0606 09:13:37.947778 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:37 crc kubenswrapper[4911]: E0606 09:13:37.948167 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jun 06 09:13:37 crc kubenswrapper[4911]: E0606 09:13:37.948434 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-w544n" podUID="ede76ff2-387d-4778-b8f5-0dbbc5cf5c35" Jun 06 09:13:38 crc kubenswrapper[4911]: I0606 09:13:38.249304 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-24k59" event={"ID":"d9640d08-8c4d-4ee9-b4b8-be68e62b7547","Type":"ContainerStarted","Data":"8a9ca0f49d71525ffe3e17e49bb8752c3feed09e2da37353bfa654a75edc8106"} Jun 06 09:13:38 crc kubenswrapper[4911]: I0606 09:13:38.256251 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerStarted","Data":"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f"} Jun 06 09:13:38 crc kubenswrapper[4911]: I0606 09:13:38.258212 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" event={"ID":"227358ee-53fa-4f78-9894-854b4d7253cf","Type":"ContainerStarted","Data":"7c779c83c754b69a9bf72fbc1efb36e85e45cd3a0b544b2baa39175dcd42e6ea"} Jun 06 09:13:38 crc kubenswrapper[4911]: I0606 09:13:38.258258 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" event={"ID":"227358ee-53fa-4f78-9894-854b4d7253cf","Type":"ContainerStarted","Data":"a433b353565daec80ae06628c2d2f69a306897dac45c0cbd7128f192398b5ec4"} Jun 06 09:13:38 crc kubenswrapper[4911]: I0606 09:13:38.291593 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h8f74" podStartSLOduration=7.291568318 podStartE2EDuration="7.291568318s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:38.290919534 +0000 UTC m=+29.566345087" 
watchObservedRunningTime="2025-06-06 09:13:38.291568318 +0000 UTC m=+29.566993861" Jun 06 09:13:38 crc kubenswrapper[4911]: I0606 09:13:38.947806 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:38 crc kubenswrapper[4911]: E0606 09:13:38.948298 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jun 06 09:13:39 crc kubenswrapper[4911]: I0606 09:13:39.264825 4911 generic.go:334] "Generic (PLEG): container finished" podID="d9640d08-8c4d-4ee9-b4b8-be68e62b7547" containerID="8a9ca0f49d71525ffe3e17e49bb8752c3feed09e2da37353bfa654a75edc8106" exitCode=0 Jun 06 09:13:39 crc kubenswrapper[4911]: I0606 09:13:39.264881 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-24k59" event={"ID":"d9640d08-8c4d-4ee9-b4b8-be68e62b7547","Type":"ContainerDied","Data":"8a9ca0f49d71525ffe3e17e49bb8752c3feed09e2da37353bfa654a75edc8106"} Jun 06 09:13:39 crc kubenswrapper[4911]: I0606 09:13:39.522423 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.522658 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:47.522626138 +0000 UTC m=+38.798051681 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:39 crc kubenswrapper[4911]: I0606 09:13:39.623824 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:39 crc kubenswrapper[4911]: I0606 09:13:39.623900 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:39 crc kubenswrapper[4911]: I0606 09:13:39.623931 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:39 crc kubenswrapper[4911]: I0606 09:13:39.623959 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624062 4911 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624140 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:47.62412223 +0000 UTC m=+38.899547773 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624148 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624197 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624220 4911 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624250 4911 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624326 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:47.624288054 +0000 UTC m=+38.899713737 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624370 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:47.624339685 +0000 UTC m=+38.899765398 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624399 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624463 4911 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624482 4911 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.624570 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:47.624545629 +0000 UTC m=+38.899971362 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jun 06 09:13:39 crc kubenswrapper[4911]: I0606 09:13:39.947257 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:39 crc kubenswrapper[4911]: I0606 09:13:39.947402 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.948458 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.948605 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-w544n" podUID="ede76ff2-387d-4778-b8f5-0dbbc5cf5c35" Jun 06 09:13:39 crc kubenswrapper[4911]: I0606 09:13:39.954498 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:39 crc kubenswrapper[4911]: E0606 09:13:39.954695 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jun 06 09:13:40 crc kubenswrapper[4911]: I0606 09:13:40.272215 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerStarted","Data":"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e"} Jun 06 09:13:40 crc kubenswrapper[4911]: I0606 09:13:40.272676 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:40 crc kubenswrapper[4911]: I0606 09:13:40.272696 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:40 crc kubenswrapper[4911]: I0606 09:13:40.281854 4911 generic.go:334] "Generic (PLEG): container finished" podID="d9640d08-8c4d-4ee9-b4b8-be68e62b7547" containerID="a9ea277f9c73d3710cc571f054f0f22b4dee57ce7e3d30cc92552d74dba20e71" exitCode=0 Jun 06 09:13:40 crc kubenswrapper[4911]: I0606 09:13:40.281901 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-24k59" event={"ID":"d9640d08-8c4d-4ee9-b4b8-be68e62b7547","Type":"ContainerDied","Data":"a9ea277f9c73d3710cc571f054f0f22b4dee57ce7e3d30cc92552d74dba20e71"} Jun 06 09:13:40 crc kubenswrapper[4911]: I0606 09:13:40.305286 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:40 crc kubenswrapper[4911]: I0606 09:13:40.305662 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:40 crc kubenswrapper[4911]: I0606 09:13:40.324732 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" podStartSLOduration=9.324494301 podStartE2EDuration="9.324494301s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:40.302907073 +0000 UTC m=+31.578332626" watchObservedRunningTime="2025-06-06 09:13:40.324494301 +0000 UTC m=+31.599919864" Jun 06 09:13:40 crc kubenswrapper[4911]: I0606 09:13:40.947525 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:40 crc kubenswrapper[4911]: E0606 09:13:40.947696 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jun 06 09:13:41 crc kubenswrapper[4911]: I0606 09:13:41.290084 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-24k59" event={"ID":"d9640d08-8c4d-4ee9-b4b8-be68e62b7547","Type":"ContainerStarted","Data":"325845fbeb4daff8f3b68c1332d6d6f44708412fad9a63395c5c3f6ef7570beb"} Jun 06 09:13:41 crc kubenswrapper[4911]: I0606 09:13:41.290139 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jun 06 09:13:41 crc kubenswrapper[4911]: I0606 09:13:41.313238 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-24k59" podStartSLOduration=10.31321323 podStartE2EDuration="10.31321323s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:41.312514624 +0000 UTC m=+32.587940197" watchObservedRunningTime="2025-06-06 09:13:41.31321323 +0000 UTC m=+32.588638763" Jun 06 09:13:41 crc kubenswrapper[4911]: I0606 09:13:41.645427 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs\") pod \"network-metrics-daemon-w544n\" (UID: \"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35\") " pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:41 crc kubenswrapper[4911]: E0606 09:13:41.645636 4911 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jun 06 09:13:41 crc kubenswrapper[4911]: E0606 09:13:41.645717 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs podName:ede76ff2-387d-4778-b8f5-0dbbc5cf5c35 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.645697596 +0000 UTC m=+40.921123139 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs") pod "network-metrics-daemon-w544n" (UID: "ede76ff2-387d-4778-b8f5-0dbbc5cf5c35") : object "openshift-multus"/"metrics-daemon-secret" not registered Jun 06 09:13:41 crc kubenswrapper[4911]: I0606 09:13:41.947603 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:41 crc kubenswrapper[4911]: I0606 09:13:41.947628 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:41 crc kubenswrapper[4911]: E0606 09:13:41.947743 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-w544n" podUID="ede76ff2-387d-4778-b8f5-0dbbc5cf5c35" Jun 06 09:13:41 crc kubenswrapper[4911]: I0606 09:13:41.947758 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:41 crc kubenswrapper[4911]: E0606 09:13:41.947851 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jun 06 09:13:41 crc kubenswrapper[4911]: E0606 09:13:41.947912 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jun 06 09:13:42 crc kubenswrapper[4911]: I0606 09:13:42.293007 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jun 06 09:13:42 crc kubenswrapper[4911]: I0606 09:13:42.646717 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-w544n"] Jun 06 09:13:42 crc kubenswrapper[4911]: I0606 09:13:42.646865 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:42 crc kubenswrapper[4911]: E0606 09:13:42.647001 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-w544n" podUID="ede76ff2-387d-4778-b8f5-0dbbc5cf5c35" Jun 06 09:13:42 crc kubenswrapper[4911]: I0606 09:13:42.947130 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:42 crc kubenswrapper[4911]: E0606 09:13:42.947299 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jun 06 09:13:43 crc kubenswrapper[4911]: I0606 09:13:43.947009 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:43 crc kubenswrapper[4911]: I0606 09:13:43.947009 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:43 crc kubenswrapper[4911]: E0606 09:13:43.947240 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jun 06 09:13:43 crc kubenswrapper[4911]: E0606 09:13:43.947356 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jun 06 09:13:44 crc kubenswrapper[4911]: I0606 09:13:44.947311 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:44 crc kubenswrapper[4911]: I0606 09:13:44.947342 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:44 crc kubenswrapper[4911]: E0606 09:13:44.947480 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-w544n" podUID="ede76ff2-387d-4778-b8f5-0dbbc5cf5c35" Jun 06 09:13:44 crc kubenswrapper[4911]: E0606 09:13:44.947581 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.089847 4911 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.089980 4911 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.129911 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r56ns"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.130538 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.132256 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p5p9k"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.132678 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.132884 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.133497 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.138886 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.139520 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.140872 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c"] Jun 06 09:13:45 crc kubenswrapper[4911]: W0606 09:13:45.142480 4911 reflector.go:561] object-"openshift-controller-manager"/"openshift-global-ca": failed to list *v1.ConfigMap: configmaps "openshift-global-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Jun 06 09:13:45 crc kubenswrapper[4911]: E0606 09:13:45.142547 4911 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-global-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-global-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.142503 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-4dcwp"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.142630 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.142870 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.143150 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.143512 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.143796 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.144585 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.144748 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rqvnb"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.144851 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.145021 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.145307 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.145345 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.145464 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.145555 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.145565 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.145585 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.145646 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.145654 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.157633 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.157703 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.157796 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.157845 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.157949 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.159072 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.159498 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.159666 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.159766 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.160890 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5zv54"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.160996 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.206509 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.208278 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.209136 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.209203 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.210006 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.211673 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.212254 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.216232 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.216622 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.216926 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.216727 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.216764 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.216793 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.216818 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.217243 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.217434 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.217601 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.217708 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.217730 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.217897 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.217930 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.218018 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.218043 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.218065 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 
09:13:45.218135 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.218159 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.218240 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.218354 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.218476 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.218673 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.218832 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.218933 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.219065 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.219084 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.219227 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.220996 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.221144 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.222343 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.223235 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.225799 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.226040 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.226247 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.226369 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.226426 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.226622 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-fb9n5"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.227546 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.228150 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.228700 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.235757 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-ftvnt"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.236349 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8zkfh"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.238087 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.240138 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.240424 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.240596 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.240846 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-ftvnt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.241319 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.241343 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.241562 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.241710 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.241920 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.241971 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.242036 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.242138 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.242261 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.242376 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.242858 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.269494 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.271740 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.272547 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.274878 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.275225 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.274877 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.276017 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.276512 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-z2z69"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.278240 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.279733 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.279965 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.280141 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.280374 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.280467 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.281983 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.282329 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.282434 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.282456 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wdk2h"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.282535 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.282644 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.283413 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.284654 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.285352 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.285538 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.286383 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.286833 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.289798 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.289875 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.289948 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.290003 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.290003 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.290197 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.290230 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.290169 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.292027 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.293023 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.293048 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.294170 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.295410 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.298806 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rbghq"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.299497 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l9hbv"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.300005 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.300522 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.300741 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-pld6r"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.301647 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.303051 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.303440 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.304013 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.305158 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-bn7hn"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.305882 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306166 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-serving-cert\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306203 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69b5n\" (UniqueName: \"kubernetes.io/projected/6c5c9d31-1572-4801-abb9-4cfdf49d1986-kube-api-access-69b5n\") pod \"openshift-apiserver-operator-796bbdcf4f-77fpp\" (UID: \"6c5c9d31-1572-4801-abb9-4cfdf49d1986\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306226 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-oauth-config\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306256 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c5c9d31-1572-4801-abb9-4cfdf49d1986-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-77fpp\" (UID: \"6c5c9d31-1572-4801-abb9-4cfdf49d1986\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306275 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d05fca62-157f-4111-966b-0b1bad77fc76-etcd-client\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306293 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b72e3dc5-6506-4f38-bd35-23abe3a44764-audit-dir\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306310 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-client-ca\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306339 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7zx8\" (UniqueName: \"kubernetes.io/projected/69127e92-f707-4b41-a690-9fd917998557-kube-api-access-p7zx8\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 
09:13:45.306356 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b72e3dc5-6506-4f38-bd35-23abe3a44764-etcd-client\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306381 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d05fca62-157f-4111-966b-0b1bad77fc76-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306399 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/22e821a6-f095-4c6e-ac9e-8484e31bd21e-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306417 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d05fca62-157f-4111-966b-0b1bad77fc76-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306434 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b95e2aa-04fb-40c7-b729-da37e9cc2745-config\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306450 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-audit\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306466 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99e87a63-ddac-4e72-9f32-aff82d073d08-serving-cert\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306538 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9999ca00-de82-4451-bfe9-c216be6edd43-serving-cert\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306558 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/b72e3dc5-6506-4f38-bd35-23abe3a44764-serving-cert\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306576 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d05fca62-157f-4111-966b-0b1bad77fc76-encryption-config\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306591 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn9dv\" (UniqueName: \"kubernetes.io/projected/d05fca62-157f-4111-966b-0b1bad77fc76-kube-api-access-xn9dv\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306607 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-config\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306621 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/22e821a6-f095-4c6e-ac9e-8484e31bd21e-images\") pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306657 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fslhg\" (UniqueName: \"kubernetes.io/projected/22e821a6-f095-4c6e-ac9e-8484e31bd21e-kube-api-access-fslhg\") pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306948 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-etcd-serving-ca\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.306987 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6nkq\" (UniqueName: \"kubernetes.io/projected/9999ca00-de82-4451-bfe9-c216be6edd43-kube-api-access-l6nkq\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307009 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w25qw\" (UniqueName: 
\"kubernetes.io/projected/b72e3dc5-6506-4f38-bd35-23abe3a44764-kube-api-access-w25qw\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307026 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-client-ca\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307188 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d05fca62-157f-4111-966b-0b1bad77fc76-audit-dir\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307227 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-config\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307257 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d05fca62-157f-4111-966b-0b1bad77fc76-serving-cert\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307289 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b72e3dc5-6506-4f38-bd35-23abe3a44764-node-pullsecrets\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307329 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-config\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307360 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-console-config\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307379 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22e821a6-f095-4c6e-ac9e-8484e31bd21e-config\") pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307409 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307434 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22tqd\" (UniqueName: \"kubernetes.io/projected/99e87a63-ddac-4e72-9f32-aff82d073d08-kube-api-access-22tqd\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307460 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d05fca62-157f-4111-966b-0b1bad77fc76-audit-policies\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307479 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-oauth-serving-cert\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307497 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b72e3dc5-6506-4f38-bd35-23abe3a44764-encryption-config\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307526 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-service-ca\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307548 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b95e2aa-04fb-40c7-b729-da37e9cc2745-serving-cert\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307611 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307654 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2b95e2aa-04fb-40c7-b729-da37e9cc2745-trusted-ca\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307677 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zz4z\" (UniqueName: \"kubernetes.io/projected/2b95e2aa-04fb-40c7-b729-da37e9cc2745-kube-api-access-5zz4z\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307708 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c5c9d31-1572-4801-abb9-4cfdf49d1986-config\") pod \"openshift-apiserver-operator-796bbdcf4f-77fpp\" (UID: \"6c5c9d31-1572-4801-abb9-4cfdf49d1986\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307729 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-trusted-ca-bundle\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.307745 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-image-import-ca\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.308133 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.310787 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.314762 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.315733 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.318437 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.320031 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.323759 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.324383 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.326782 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.328693 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.330212 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.331734 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.333041 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.333474 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6jl5k"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.337462 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-64hwv"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.337753 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6jl5k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.338976 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.339158 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.340255 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.341491 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.342765 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.344937 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.346037 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.347307 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fghbg"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.348624 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.349019 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.350637 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.352030 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r56ns"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.352976 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.353026 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.354214 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.355257 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5zv54"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.356567 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-z2z69"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.357610 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.358599 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.359572 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-fb9n5"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.360521 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.361489 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rqvnb"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.362488 4911 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.363523 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-pld6r"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.364508 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.366062 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.367293 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-4dcwp"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.368499 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.370739 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-dlswd"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.371729 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-dlswd" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.372131 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p5p9k"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.372780 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.373468 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.375226 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wdk2h"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.376517 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rbghq"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.378130 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.379815 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l9hbv"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.381638 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.384309 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-ftvnt"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.386821 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.394824 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.400426 4911 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-dns/dns-default-xj4b2"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.406482 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.406490 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-xj4b2" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408124 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408628 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b95e2aa-04fb-40c7-b729-da37e9cc2745-serving-cert\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408655 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408672 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2b95e2aa-04fb-40c7-b729-da37e9cc2745-trusted-ca\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408693 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zz4z\" (UniqueName: \"kubernetes.io/projected/2b95e2aa-04fb-40c7-b729-da37e9cc2745-kube-api-access-5zz4z\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408717 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c5c9d31-1572-4801-abb9-4cfdf49d1986-config\") pod \"openshift-apiserver-operator-796bbdcf4f-77fpp\" (UID: \"6c5c9d31-1572-4801-abb9-4cfdf49d1986\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408734 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-trusted-ca-bundle\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408754 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-image-import-ca\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408775 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-serving-cert\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408798 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8lmv\" (UniqueName: \"kubernetes.io/projected/5502a710-ac52-4347-8c1f-095219735356-kube-api-access-m8lmv\") pod \"dns-operator-744455d44c-fb9n5\" (UID: \"5502a710-ac52-4347-8c1f-095219735356\") " pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408818 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69b5n\" (UniqueName: \"kubernetes.io/projected/6c5c9d31-1572-4801-abb9-4cfdf49d1986-kube-api-access-69b5n\") pod \"openshift-apiserver-operator-796bbdcf4f-77fpp\" (UID: \"6c5c9d31-1572-4801-abb9-4cfdf49d1986\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408836 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-oauth-config\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408862 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c5c9d31-1572-4801-abb9-4cfdf49d1986-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-77fpp\" (UID: \"6c5c9d31-1572-4801-abb9-4cfdf49d1986\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408879 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d05fca62-157f-4111-966b-0b1bad77fc76-etcd-client\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408894 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b72e3dc5-6506-4f38-bd35-23abe3a44764-audit-dir\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408915 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-client-ca\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408943 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7zx8\" (UniqueName: \"kubernetes.io/projected/69127e92-f707-4b41-a690-9fd917998557-kube-api-access-p7zx8\") pod \"console-f9d7485db-4dcwp\" (UID: 
\"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408961 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b72e3dc5-6506-4f38-bd35-23abe3a44764-etcd-client\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.408989 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d05fca62-157f-4111-966b-0b1bad77fc76-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409012 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/22e821a6-f095-4c6e-ac9e-8484e31bd21e-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409032 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d05fca62-157f-4111-966b-0b1bad77fc76-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409050 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b95e2aa-04fb-40c7-b729-da37e9cc2745-config\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409069 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-audit\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409126 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99e87a63-ddac-4e72-9f32-aff82d073d08-serving-cert\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409143 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5502a710-ac52-4347-8c1f-095219735356-metrics-tls\") pod \"dns-operator-744455d44c-fb9n5\" (UID: \"5502a710-ac52-4347-8c1f-095219735356\") " pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409165 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/9999ca00-de82-4451-bfe9-c216be6edd43-serving-cert\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409186 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b72e3dc5-6506-4f38-bd35-23abe3a44764-serving-cert\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409205 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn9dv\" (UniqueName: \"kubernetes.io/projected/d05fca62-157f-4111-966b-0b1bad77fc76-kube-api-access-xn9dv\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409226 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-config\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409255 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/22e821a6-f095-4c6e-ac9e-8484e31bd21e-images\") pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409277 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fslhg\" (UniqueName: \"kubernetes.io/projected/22e821a6-f095-4c6e-ac9e-8484e31bd21e-kube-api-access-fslhg\") pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409302 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d05fca62-157f-4111-966b-0b1bad77fc76-encryption-config\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409322 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-etcd-serving-ca\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409346 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w25qw\" (UniqueName: \"kubernetes.io/projected/b72e3dc5-6506-4f38-bd35-23abe3a44764-kube-api-access-w25qw\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc 
kubenswrapper[4911]: I0606 09:13:45.409367 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6nkq\" (UniqueName: \"kubernetes.io/projected/9999ca00-de82-4451-bfe9-c216be6edd43-kube-api-access-l6nkq\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409389 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-client-ca\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409419 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d05fca62-157f-4111-966b-0b1bad77fc76-serving-cert\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409436 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d05fca62-157f-4111-966b-0b1bad77fc76-audit-dir\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409455 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-config\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409480 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b72e3dc5-6506-4f38-bd35-23abe3a44764-node-pullsecrets\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409501 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-config\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409518 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-console-config\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409535 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22e821a6-f095-4c6e-ac9e-8484e31bd21e-config\") pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409554 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409572 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22tqd\" (UniqueName: \"kubernetes.io/projected/99e87a63-ddac-4e72-9f32-aff82d073d08-kube-api-access-22tqd\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409590 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-oauth-serving-cert\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409607 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b72e3dc5-6506-4f38-bd35-23abe3a44764-encryption-config\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409630 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d05fca62-157f-4111-966b-0b1bad77fc76-audit-policies\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409648 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-service-ca\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409944 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.409949 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c5c9d31-1572-4801-abb9-4cfdf49d1986-config\") pod \"openshift-apiserver-operator-796bbdcf4f-77fpp\" (UID: \"6c5c9d31-1572-4801-abb9-4cfdf49d1986\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.410438 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-audit\") pod 
\"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.410543 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2b95e2aa-04fb-40c7-b729-da37e9cc2745-trusted-ca\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.410698 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-service-ca\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.410834 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-trusted-ca-bundle\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.411141 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.411381 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b72e3dc5-6506-4f38-bd35-23abe3a44764-node-pullsecrets\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.411515 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-image-import-ca\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.411547 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b95e2aa-04fb-40c7-b729-da37e9cc2745-config\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.411427 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-oauth-serving-cert\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.411815 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d05fca62-157f-4111-966b-0b1bad77fc76-audit-dir\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.411890 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d05fca62-157f-4111-966b-0b1bad77fc76-audit-policies\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.412410 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d05fca62-157f-4111-966b-0b1bad77fc76-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.413258 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-config\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.413836 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-config\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.414109 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/22e821a6-f095-4c6e-ac9e-8484e31bd21e-images\") pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.414666 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-client-ca\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.414736 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.415394 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d05fca62-157f-4111-966b-0b1bad77fc76-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.415564 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b72e3dc5-6506-4f38-bd35-23abe3a44764-audit-dir\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.415904 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8zkfh"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.415963 4911 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-64hwv"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.416112 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-config\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.416428 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99e87a63-ddac-4e72-9f32-aff82d073d08-serving-cert\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.416451 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-serving-cert\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.416531 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2b95e2aa-04fb-40c7-b729-da37e9cc2745-serving-cert\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.416844 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-client-ca\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.417197 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b72e3dc5-6506-4f38-bd35-23abe3a44764-encryption-config\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.417214 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9999ca00-de82-4451-bfe9-c216be6edd43-serving-cert\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.417426 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b72e3dc5-6506-4f38-bd35-23abe3a44764-etcd-serving-ca\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.417475 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/22e821a6-f095-4c6e-ac9e-8484e31bd21e-machine-api-operator-tls\") 
pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.417579 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-console-config\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.417716 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b72e3dc5-6506-4f38-bd35-23abe3a44764-etcd-client\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.418392 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-8pgq8"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.418523 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d05fca62-157f-4111-966b-0b1bad77fc76-serving-cert\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.418566 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b72e3dc5-6506-4f38-bd35-23abe3a44764-serving-cert\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.418546 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d05fca62-157f-4111-966b-0b1bad77fc76-encryption-config\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.419382 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.419480 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-oauth-config\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.419732 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.419842 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d05fca62-157f-4111-966b-0b1bad77fc76-etcd-client\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.420006 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c5c9d31-1572-4801-abb9-4cfdf49d1986-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-77fpp\" (UID: \"6c5c9d31-1572-4801-abb9-4cfdf49d1986\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.420794 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.421819 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5t4fd"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.423214 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.423289 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.424082 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xj4b2"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.425067 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.426302 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-dlswd"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.428235 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fghbg"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.429424 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.436785 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5t4fd"] Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.437857 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.454202 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.473059 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.493026 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.510339 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8lmv\" (UniqueName: \"kubernetes.io/projected/5502a710-ac52-4347-8c1f-095219735356-kube-api-access-m8lmv\") pod \"dns-operator-744455d44c-fb9n5\" (UID: \"5502a710-ac52-4347-8c1f-095219735356\") " pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.510435 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5502a710-ac52-4347-8c1f-095219735356-metrics-tls\") pod \"dns-operator-744455d44c-fb9n5\" (UID: \"5502a710-ac52-4347-8c1f-095219735356\") " pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.513051 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.532931 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.553696 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.573435 4911 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.593579 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.609364 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22e821a6-f095-4c6e-ac9e-8484e31bd21e-config\") pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.609669 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5502a710-ac52-4347-8c1f-095219735356-metrics-tls\") pod \"dns-operator-744455d44c-fb9n5\" (UID: \"5502a710-ac52-4347-8c1f-095219735356\") " pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.613371 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.633772 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.653852 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.672598 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.693873 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.713233 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.733134 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.753563 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.773392 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.793183 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.812853 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.833001 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.853664 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jun 06 09:13:45 crc 
kubenswrapper[4911]: I0606 09:13:45.873754 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.892870 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.913216 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.933349 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.947103 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.947114 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.954153 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.973813 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jun 06 09:13:45 crc kubenswrapper[4911]: I0606 09:13:45.994065 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.013582 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.033897 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.053352 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.074165 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.093828 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.112797 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.134015 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.154442 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.173672 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.193587 4911 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress"/"openshift-service-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.212734 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.233840 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.254001 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.273777 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.313717 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.331070 4911 request.go:700] Waited for 1.012129518s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dpprof-cert&limit=500&resourceVersion=0 Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.332773 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.373872 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.393930 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: E0606 09:13:46.412025 4911 configmap.go:193] Couldn't get configMap openshift-controller-manager/openshift-global-ca: failed to sync configmap cache: timed out waiting for the condition Jun 06 09:13:46 crc kubenswrapper[4911]: E0606 09:13:46.412215 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-proxy-ca-bundles podName:99e87a63-ddac-4e72-9f32-aff82d073d08 nodeName:}" failed. No retries permitted until 2025-06-06 09:13:46.912179757 +0000 UTC m=+38.187605310 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "proxy-ca-bundles" (UniqueName: "kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-proxy-ca-bundles") pod "controller-manager-879f6c89f-p5p9k" (UID: "99e87a63-ddac-4e72-9f32-aff82d073d08") : failed to sync configmap cache: timed out waiting for the condition Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.413244 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.432895 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.453530 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.473532 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.494292 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.514149 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.533370 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.553249 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.573977 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.593983 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.615640 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.632962 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.654184 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.690514 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.693395 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.713376 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.733508 4911 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.753015 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.775049 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.793870 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.822276 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.832973 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.854122 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.873625 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.893365 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.913883 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.927575 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.934193 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.946807 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.946889 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.953528 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jun 06 09:13:46 crc kubenswrapper[4911]: I0606 09:13:46.973642 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.002678 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.013226 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.034009 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.053696 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.074352 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.092450 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.115083 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.133308 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.153568 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.173631 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.193433 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.230499 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zz4z\" (UniqueName: \"kubernetes.io/projected/2b95e2aa-04fb-40c7-b729-da37e9cc2745-kube-api-access-5zz4z\") pod \"console-operator-58897d9998-5zv54\" (UID: \"2b95e2aa-04fb-40c7-b729-da37e9cc2745\") " pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.249458 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7zx8\" (UniqueName: \"kubernetes.io/projected/69127e92-f707-4b41-a690-9fd917998557-kube-api-access-p7zx8\") pod \"console-f9d7485db-4dcwp\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.267826 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fslhg\" (UniqueName: 
\"kubernetes.io/projected/22e821a6-f095-4c6e-ac9e-8484e31bd21e-kube-api-access-fslhg\") pod \"machine-api-operator-5694c8668f-r56ns\" (UID: \"22e821a6-f095-4c6e-ac9e-8484e31bd21e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.291025 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22tqd\" (UniqueName: \"kubernetes.io/projected/99e87a63-ddac-4e72-9f32-aff82d073d08-kube-api-access-22tqd\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.307789 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.308529 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w25qw\" (UniqueName: \"kubernetes.io/projected/b72e3dc5-6506-4f38-bd35-23abe3a44764-kube-api-access-w25qw\") pod \"apiserver-76f77b778f-rqvnb\" (UID: \"b72e3dc5-6506-4f38-bd35-23abe3a44764\") " pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.326940 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn9dv\" (UniqueName: \"kubernetes.io/projected/d05fca62-157f-4111-966b-0b1bad77fc76-kube-api-access-xn9dv\") pod \"apiserver-7bbb656c7d-82w5c\" (UID: \"d05fca62-157f-4111-966b-0b1bad77fc76\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.331350 4911 request.go:700] Waited for 1.917851705s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa/token Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.348766 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6nkq\" (UniqueName: \"kubernetes.io/projected/9999ca00-de82-4451-bfe9-c216be6edd43-kube-api-access-l6nkq\") pod \"route-controller-manager-6576b87f9c-bsqpb\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.370403 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69b5n\" (UniqueName: \"kubernetes.io/projected/6c5c9d31-1572-4801-abb9-4cfdf49d1986-kube-api-access-69b5n\") pod \"openshift-apiserver-operator-796bbdcf4f-77fpp\" (UID: \"6c5c9d31-1572-4801-abb9-4cfdf49d1986\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.373585 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-sysctl-allowlist" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.377686 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.392429 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.394370 4911 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.399045 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.415034 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.417555 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.424910 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.433831 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.476921 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.477824 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8lmv\" (UniqueName: \"kubernetes.io/projected/5502a710-ac52-4347-8c1f-095219735356-kube-api-access-m8lmv\") pod \"dns-operator-744455d44c-fb9n5\" (UID: \"5502a710-ac52-4347-8c1f-095219735356\") " pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.496612 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.514181 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.533670 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.535538 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:47 crc kubenswrapper[4911]: E0606 09:13:47.535805 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:14:03.535788822 +0000 UTC m=+54.811214365 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.559629 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.570944 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p5p9k\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.611203 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.613352 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.633883 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.636728 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbs89\" (UniqueName: \"kubernetes.io/projected/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-kube-api-access-wbs89\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.636770 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.636802 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.636825 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e3c39a95-76c2-42b7-a8e8-e1379e1236bf-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-pld6r\" (UID: \"e3c39a95-76c2-42b7-a8e8-e1379e1236bf\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.636850 4911 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/79fabc0e-2c89-4889-9a07-afa5306b20f8-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-fp5lv\" (UID: \"79fabc0e-2c89-4889-9a07-afa5306b20f8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.636878 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16222bda-92f4-483e-8fa8-6e701085186f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bt8wp\" (UID: \"16222bda-92f4-483e-8fa8-6e701085186f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.636919 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ea53c70-3632-45b8-a47f-16c6f3bc565a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xtd6h\" (UID: \"0ea53c70-3632-45b8-a47f-16c6f3bc565a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.636953 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/79fabc0e-2c89-4889-9a07-afa5306b20f8-proxy-tls\") pod \"machine-config-controller-84d6567774-fp5lv\" (UID: \"79fabc0e-2c89-4889-9a07-afa5306b20f8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.636977 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ed04786-6dd4-418a-a077-534b7e26fdab-serving-cert\") pod \"openshift-config-operator-7777fb866f-wb7fj\" (UID: \"3ed04786-6dd4-418a-a077-534b7e26fdab\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637011 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-bound-sa-token\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637114 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqslq\" (UniqueName: \"kubernetes.io/projected/3ed04786-6dd4-418a-a077-534b7e26fdab-kube-api-access-lqslq\") pod \"openshift-config-operator-7777fb866f-wb7fj\" (UID: \"3ed04786-6dd4-418a-a077-534b7e26fdab\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637140 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2b97\" (UniqueName: \"kubernetes.io/projected/79fabc0e-2c89-4889-9a07-afa5306b20f8-kube-api-access-r2b97\") pod \"machine-config-controller-84d6567774-fp5lv\" (UID: \"79fabc0e-2c89-4889-9a07-afa5306b20f8\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637179 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/97427249-89c5-4e96-850e-0af8f860865a-signing-cabundle\") pod \"service-ca-9c57cc56f-l9hbv\" (UID: \"97427249-89c5-4e96-850e-0af8f860865a\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637206 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbph6\" (UniqueName: \"kubernetes.io/projected/97427249-89c5-4e96-850e-0af8f860865a-kube-api-access-fbph6\") pod \"service-ca-9c57cc56f-l9hbv\" (UID: \"97427249-89c5-4e96-850e-0af8f860865a\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637297 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-trusted-ca\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637356 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb386b0e-b615-4901-b3aa-0c947a397281-config\") pod \"service-ca-operator-777779d784-rbghq\" (UID: \"bb386b0e-b615-4901-b3aa-0c947a397281\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637381 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-policies\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637460 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6d7jh\" (UniqueName: \"kubernetes.io/projected/49294ad0-2f04-4c24-b5b5-974c2b3e5259-kube-api-access-6d7jh\") pod \"migrator-59844c95c7-mptr9\" (UID: \"49294ad0-2f04-4c24-b5b5-974c2b3e5259\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637485 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc05a058-b682-4b99-8eef-ba7c1acf1782-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cxj7k\" (UID: \"bc05a058-b682-4b99-8eef-ba7c1acf1782\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637547 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.637988 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-etcd-ca\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.638087 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.638145 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ea53c70-3632-45b8-a47f-16c6f3bc565a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xtd6h\" (UID: \"0ea53c70-3632-45b8-a47f-16c6f3bc565a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.638186 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.638247 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-certificates\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.638281 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: E0606 09:13:47.638340 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.138310857 +0000 UTC m=+39.413736470 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.638975 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c695455e-b05b-4406-9471-b23af6eef3a1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.639581 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19840c29-1e2b-499f-8a15-714083bd64a2-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kwg9g\" (UID: \"19840c29-1e2b-499f-8a15-714083bd64a2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.639620 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16222bda-92f4-483e-8fa8-6e701085186f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bt8wp\" (UID: \"16222bda-92f4-483e-8fa8-6e701085186f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.639695 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.639719 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/97427249-89c5-4e96-850e-0af8f860865a-signing-key\") pod \"service-ca-9c57cc56f-l9hbv\" (UID: \"97427249-89c5-4e96-850e-0af8f860865a\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.639737 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-config\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.641348 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrtdp\" (UniqueName: \"kubernetes.io/projected/c9c6b446-7eb9-402d-b1c2-11e00895054a-kube-api-access-xrtdp\") pod \"cluster-samples-operator-665b6dd947-9b2sw\" (UID: \"c9c6b446-7eb9-402d-b1c2-11e00895054a\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.641395 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7f2h\" (UniqueName: \"kubernetes.io/projected/87f30b86-0303-493c-8919-e37e07f71709-kube-api-access-f7f2h\") pod \"downloads-7954f5f757-ftvnt\" (UID: \"87f30b86-0303-493c-8919-e37e07f71709\") " pod="openshift-console/downloads-7954f5f757-ftvnt" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.641439 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0ea53c70-3632-45b8-a47f-16c6f3bc565a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xtd6h\" (UID: \"0ea53c70-3632-45b8-a47f-16c6f3bc565a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.641498 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.641523 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-service-ca-bundle\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.641606 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.641642 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c9c6b446-7eb9-402d-b1c2-11e00895054a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-9b2sw\" (UID: \"c9c6b446-7eb9-402d-b1c2-11e00895054a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.641841 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.642067 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-ca-trust-extracted\") pod 
\"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.642116 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-serving-cert\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.642172 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7tvz\" (UniqueName: \"kubernetes.io/projected/16222bda-92f4-483e-8fa8-6e701085186f-kube-api-access-k7tvz\") pod \"openshift-controller-manager-operator-756b6f6bc6-bt8wp\" (UID: \"16222bda-92f4-483e-8fa8-6e701085186f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.642221 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85df889f-53b1-41d3-a6fa-9a05019ef05b-config\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.642370 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19840c29-1e2b-499f-8a15-714083bd64a2-config\") pod \"kube-apiserver-operator-766d6c64bb-kwg9g\" (UID: \"19840c29-1e2b-499f-8a15-714083bd64a2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.642841 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.644071 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb386b0e-b615-4901-b3aa-0c947a397281-serving-cert\") pod \"service-ca-operator-777779d784-rbghq\" (UID: \"bb386b0e-b615-4901-b3aa-0c947a397281\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.644148 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.644575 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8b4p\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-kube-api-access-c8b4p\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.644642 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-etcd-service-ca\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.645136 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc05a058-b682-4b99-8eef-ba7c1acf1782-config\") pod \"kube-controller-manager-operator-78b949d7b-cxj7k\" (UID: \"bc05a058-b682-4b99-8eef-ba7c1acf1782\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.645210 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-default-certificate\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.645355 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-tls\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.645442 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-images\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.645510 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2zsn\" (UniqueName: \"kubernetes.io/projected/e3c39a95-76c2-42b7-a8e8-e1379e1236bf-kube-api-access-t2zsn\") pod \"multus-admission-controller-857f4d67dd-pld6r\" (UID: \"e3c39a95-76c2-42b7-a8e8-e1379e1236bf\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.646377 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.646530 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19840c29-1e2b-499f-8a15-714083bd64a2-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kwg9g\" (UID: \"19840c29-1e2b-499f-8a15-714083bd64a2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.646582 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.647167 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.653542 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-metrics-certs\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.653587 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/c695455e-b05b-4406-9471-b23af6eef3a1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.653669 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgp9l\" (UniqueName: \"kubernetes.io/projected/bb386b0e-b615-4901-b3aa-0c947a397281-kube-api-access-pgp9l\") pod 
\"service-ca-operator-777779d784-rbghq\" (UID: \"bb386b0e-b615-4901-b3aa-0c947a397281\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.653932 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.653961 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2666\" (UniqueName: \"kubernetes.io/projected/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-kube-api-access-r2666\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.653983 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/85df889f-53b1-41d3-a6fa-9a05019ef05b-machine-approver-tls\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654018 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6h9xp\" (UniqueName: \"kubernetes.io/projected/85df889f-53b1-41d3-a6fa-9a05019ef05b-kube-api-access-6h9xp\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654045 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654067 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4l7d\" (UniqueName: \"kubernetes.io/projected/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-kube-api-access-t4l7d\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654141 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654175 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-dir\") pod 
\"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654205 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654229 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-etcd-client\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654259 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xgmr\" (UniqueName: \"kubernetes.io/projected/c695455e-b05b-4406-9471-b23af6eef3a1-kube-api-access-5xgmr\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654358 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3ed04786-6dd4-418a-a077-534b7e26fdab-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wb7fj\" (UID: \"3ed04786-6dd4-418a-a077-534b7e26fdab\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654502 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc05a058-b682-4b99-8eef-ba7c1acf1782-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cxj7k\" (UID: \"bc05a058-b682-4b99-8eef-ba7c1acf1782\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654525 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-proxy-tls\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654571 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-stats-auth\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654594 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvgmt\" (UniqueName: 
\"kubernetes.io/projected/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-kube-api-access-pvgmt\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.654990 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.655023 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c695455e-b05b-4406-9471-b23af6eef3a1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.655306 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/85df889f-53b1-41d3-a6fa-9a05019ef05b-auth-proxy-config\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.655416 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.655730 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.659675 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.735614 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5zv54"] Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.756201 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.756777 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/97427249-89c5-4e96-850e-0af8f860865a-signing-cabundle\") pod \"service-ca-9c57cc56f-l9hbv\" (UID: \"97427249-89c5-4e96-850e-0af8f860865a\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.756808 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbph6\" (UniqueName: \"kubernetes.io/projected/97427249-89c5-4e96-850e-0af8f860865a-kube-api-access-fbph6\") pod \"service-ca-9c57cc56f-l9hbv\" (UID: \"97427249-89c5-4e96-850e-0af8f860865a\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.756832 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb386b0e-b615-4901-b3aa-0c947a397281-config\") pod \"service-ca-operator-777779d784-rbghq\" (UID: \"bb386b0e-b615-4901-b3aa-0c947a397281\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.756856 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-policies\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.756877 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6d7jh\" (UniqueName: \"kubernetes.io/projected/49294ad0-2f04-4c24-b5b5-974c2b3e5259-kube-api-access-6d7jh\") pod \"migrator-59844c95c7-mptr9\" (UID: \"49294ad0-2f04-4c24-b5b5-974c2b3e5259\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.756901 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-trusted-ca\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.756924 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc05a058-b682-4b99-8eef-ba7c1acf1782-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cxj7k\" (UID: \"bc05a058-b682-4b99-8eef-ba7c1acf1782\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.756970 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f95470f-d456-468e-83a7-e72420631662-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-tgj9w\" (UID: \"9f95470f-d456-468e-83a7-e72420631662\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.756999 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-etcd-ca\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757021 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757109 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k82wx\" (UniqueName: \"kubernetes.io/projected/2654cb62-4a89-4c50-ab33-27c9273b7e82-kube-api-access-k82wx\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757140 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/02e23293-e9f2-4e5e-9aae-bdeb17c17823-cert\") pod \"ingress-canary-dlswd\" (UID: \"02e23293-e9f2-4e5e-9aae-bdeb17c17823\") " pod="openshift-ingress-canary/ingress-canary-dlswd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757165 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ea53c70-3632-45b8-a47f-16c6f3bc565a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xtd6h\" (UID: \"0ea53c70-3632-45b8-a47f-16c6f3bc565a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757187 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757212 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/acf3bc54-04c0-416f-bf4a-541244ac0074-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-h4q7d\" (UID: \"acf3bc54-04c0-416f-bf4a-541244ac0074\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757241 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-certificates\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757264 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757298 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-64hwv\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757320 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2018465-ff54-4f01-aef9-a87a2973d419-trusted-ca\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757345 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c695455e-b05b-4406-9471-b23af6eef3a1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757368 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f2018465-ff54-4f01-aef9-a87a2973d419-metrics-tls\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757389 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-ready\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757415 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19840c29-1e2b-499f-8a15-714083bd64a2-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kwg9g\" (UID: \"19840c29-1e2b-499f-8a15-714083bd64a2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757434 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16222bda-92f4-483e-8fa8-6e701085186f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bt8wp\" (UID: \"16222bda-92f4-483e-8fa8-6e701085186f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757462 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/73326677-9e72-46c9-8dad-c0bfe47d599f-webhook-cert\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757499 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppg74\" (UniqueName: \"kubernetes.io/projected/843168d2-cb7b-42e3-bfe9-63e012c28428-kube-api-access-ppg74\") pod \"machine-config-server-6jl5k\" (UID: \"843168d2-cb7b-42e3-bfe9-63e012c28428\") " pod="openshift-machine-config-operator/machine-config-server-6jl5k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757525 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb4tz\" (UniqueName: \"kubernetes.io/projected/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-kube-api-access-vb4tz\") pod \"collect-profiles-29153340-7mt4c\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757556 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757576 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31704392-a75e-4c29-b905-778ae0b34fb6-serving-cert\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757593 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njbq7\" (UniqueName: \"kubernetes.io/projected/36152524-008c-4ae4-9b35-70aceacddd34-kube-api-access-njbq7\") pod \"kube-storage-version-migrator-operator-b67b599dd-s9cdv\" (UID: \"36152524-008c-4ae4-9b35-70aceacddd34\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757615 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/97427249-89c5-4e96-850e-0af8f860865a-signing-key\") pod \"service-ca-9c57cc56f-l9hbv\" (UID: \"97427249-89c5-4e96-850e-0af8f860865a\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757633 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/53b9bdc5-cb4e-4ee1-be2f-d58d0c554614-metrics-tls\") pod \"dns-default-xj4b2\" (UID: \"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614\") " pod="openshift-dns/dns-default-xj4b2" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757650 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7f2h\" (UniqueName: 
\"kubernetes.io/projected/87f30b86-0303-493c-8919-e37e07f71709-kube-api-access-f7f2h\") pod \"downloads-7954f5f757-ftvnt\" (UID: \"87f30b86-0303-493c-8919-e37e07f71709\") " pod="openshift-console/downloads-7954f5f757-ftvnt" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757668 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0ea53c70-3632-45b8-a47f-16c6f3bc565a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xtd6h\" (UID: \"0ea53c70-3632-45b8-a47f-16c6f3bc565a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757685 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-socket-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757728 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-config\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757755 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrtdp\" (UniqueName: \"kubernetes.io/projected/c9c6b446-7eb9-402d-b1c2-11e00895054a-kube-api-access-xrtdp\") pod \"cluster-samples-operator-665b6dd947-9b2sw\" (UID: \"c9c6b446-7eb9-402d-b1c2-11e00895054a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757795 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-service-ca-bundle\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757817 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/31704392-a75e-4c29-b905-778ae0b34fb6-service-ca-bundle\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757842 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ct48\" (UniqueName: \"kubernetes.io/projected/73326677-9e72-46c9-8dad-c0bfe47d599f-kube-api-access-2ct48\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757867 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c9c6b446-7eb9-402d-b1c2-11e00895054a-samples-operator-tls\") pod 
\"cluster-samples-operator-665b6dd947-9b2sw\" (UID: \"c9c6b446-7eb9-402d-b1c2-11e00895054a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757890 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757915 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757956 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.757981 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-serving-cert\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.758003 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7tvz\" (UniqueName: \"kubernetes.io/projected/16222bda-92f4-483e-8fa8-6e701085186f-kube-api-access-k7tvz\") pod \"openshift-controller-manager-operator-756b6f6bc6-bt8wp\" (UID: \"16222bda-92f4-483e-8fa8-6e701085186f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.758023 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19840c29-1e2b-499f-8a15-714083bd64a2-config\") pod \"kube-apiserver-operator-766d6c64bb-kwg9g\" (UID: \"19840c29-1e2b-499f-8a15-714083bd64a2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.758043 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqpww\" (UniqueName: \"kubernetes.io/projected/573af29b-3e41-4b58-aec9-8bbfe7845920-kube-api-access-bqpww\") pod \"marketplace-operator-79b997595-64hwv\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.758932 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85df889f-53b1-41d3-a6fa-9a05019ef05b-config\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.758971 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bffj2\" (UniqueName: \"kubernetes.io/projected/53b9bdc5-cb4e-4ee1-be2f-d58d0c554614-kube-api-access-bffj2\") pod \"dns-default-xj4b2\" (UID: \"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614\") " pod="openshift-dns/dns-default-xj4b2" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759037 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4xhs\" (UniqueName: \"kubernetes.io/projected/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-kube-api-access-l4xhs\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759063 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-secret-volume\") pod \"collect-profiles-29153340-7mt4c\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759108 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb386b0e-b615-4901-b3aa-0c947a397281-serving-cert\") pod \"service-ca-operator-777779d784-rbghq\" (UID: \"bb386b0e-b615-4901-b3aa-0c947a397281\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759134 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759158 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2018465-ff54-4f01-aef9-a87a2973d419-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759184 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8b4p\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-kube-api-access-c8b4p\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759208 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-etcd-service-ca\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759231 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-default-certificate\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759269 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc05a058-b682-4b99-8eef-ba7c1acf1782-config\") pod \"kube-controller-manager-operator-78b949d7b-cxj7k\" (UID: \"bc05a058-b682-4b99-8eef-ba7c1acf1782\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759299 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-tls\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759321 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-images\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759359 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2zsn\" (UniqueName: \"kubernetes.io/projected/e3c39a95-76c2-42b7-a8e8-e1379e1236bf-kube-api-access-t2zsn\") pod \"multus-admission-controller-857f4d67dd-pld6r\" (UID: \"e3c39a95-76c2-42b7-a8e8-e1379e1236bf\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759402 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31704392-a75e-4c29-b905-778ae0b34fb6-config\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759447 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759474 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19840c29-1e2b-499f-8a15-714083bd64a2-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kwg9g\" (UID: \"19840c29-1e2b-499f-8a15-714083bd64a2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759518 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-metrics-certs\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759542 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759566 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/c695455e-b05b-4406-9471-b23af6eef3a1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759589 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-mountpoint-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759612 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbd2m\" (UniqueName: \"kubernetes.io/projected/acf3bc54-04c0-416f-bf4a-541244ac0074-kube-api-access-lbd2m\") pod \"control-plane-machine-set-operator-78cbb6b69f-h4q7d\" (UID: \"acf3bc54-04c0-416f-bf4a-541244ac0074\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759636 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgp9l\" (UniqueName: \"kubernetes.io/projected/bb386b0e-b615-4901-b3aa-0c947a397281-kube-api-access-pgp9l\") pod \"service-ca-operator-777779d784-rbghq\" (UID: \"bb386b0e-b615-4901-b3aa-0c947a397281\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759663 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759690 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2666\" (UniqueName: \"kubernetes.io/projected/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-kube-api-access-r2666\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759721 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6h9xp\" (UniqueName: 
\"kubernetes.io/projected/85df889f-53b1-41d3-a6fa-9a05019ef05b-kube-api-access-6h9xp\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759764 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4l7d\" (UniqueName: \"kubernetes.io/projected/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-kube-api-access-t4l7d\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759788 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/85df889f-53b1-41d3-a6fa-9a05019ef05b-machine-approver-tls\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759812 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/73326677-9e72-46c9-8dad-c0bfe47d599f-tmpfs\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759838 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-dir\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759862 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759884 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqf78\" (UniqueName: \"kubernetes.io/projected/5852e56b-1121-4a77-a104-225400bdf58b-kube-api-access-tqf78\") pod \"catalog-operator-68c6474976-99c44\" (UID: \"5852e56b-1121-4a77-a104-225400bdf58b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759883 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-etcd-ca\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.759906 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-etcd-client\") pod \"etcd-operator-b45778765-wdk2h\" (UID: 
\"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.760890 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xgmr\" (UniqueName: \"kubernetes.io/projected/c695455e-b05b-4406-9471-b23af6eef3a1-kube-api-access-5xgmr\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.760934 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3ed04786-6dd4-418a-a077-534b7e26fdab-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wb7fj\" (UID: \"3ed04786-6dd4-418a-a077-534b7e26fdab\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.760971 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qp5lf\" (UniqueName: \"kubernetes.io/projected/f2018465-ff54-4f01-aef9-a87a2973d419-kube-api-access-qp5lf\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761007 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-plugins-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761038 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36152524-008c-4ae4-9b35-70aceacddd34-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s9cdv\" (UID: \"36152524-008c-4ae4-9b35-70aceacddd34\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761106 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-proxy-tls\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761133 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc05a058-b682-4b99-8eef-ba7c1acf1782-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cxj7k\" (UID: \"bc05a058-b682-4b99-8eef-ba7c1acf1782\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761157 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-registration-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: 
\"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761182 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvgmt\" (UniqueName: \"kubernetes.io/projected/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-kube-api-access-pvgmt\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761204 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/843168d2-cb7b-42e3-bfe9-63e012c28428-certs\") pod \"machine-config-server-6jl5k\" (UID: \"843168d2-cb7b-42e3-bfe9-63e012c28428\") " pod="openshift-machine-config-operator/machine-config-server-6jl5k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761225 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-config-volume\") pod \"collect-profiles-29153340-7mt4c\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761264 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-stats-auth\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761308 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wg6tr\" (UniqueName: \"kubernetes.io/projected/31704392-a75e-4c29-b905-778ae0b34fb6-kube-api-access-wg6tr\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761328 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-certificates\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761334 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761632 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c695455e-b05b-4406-9471-b23af6eef3a1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 
09:13:47.761664 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtcfg\" (UniqueName: \"kubernetes.io/projected/b2220da6-a793-45c1-923b-36bda543a176-kube-api-access-jtcfg\") pod \"olm-operator-6b444d44fb-8tqzh\" (UID: \"b2220da6-a793-45c1-923b-36bda543a176\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761692 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/85df889f-53b1-41d3-a6fa-9a05019ef05b-auth-proxy-config\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761731 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761790 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/843168d2-cb7b-42e3-bfe9-63e012c28428-node-bootstrap-token\") pod \"machine-config-server-6jl5k\" (UID: \"843168d2-cb7b-42e3-bfe9-63e012c28428\") " pod="openshift-machine-config-operator/machine-config-server-6jl5k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761801 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.761903 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.762710 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc05a058-b682-4b99-8eef-ba7c1acf1782-config\") pod \"kube-controller-manager-operator-78b949d7b-cxj7k\" (UID: \"bc05a058-b682-4b99-8eef-ba7c1acf1782\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.763720 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-etcd-client\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.762187 4911 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85df889f-53b1-41d3-a6fa-9a05019ef05b-config\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.764608 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.765129 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.765191 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.765352 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-config\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.765649 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.766415 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-service-ca-bundle\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.766497 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c695455e-b05b-4406-9471-b23af6eef3a1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.766763 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb386b0e-b615-4901-b3aa-0c947a397281-serving-cert\") pod \"service-ca-operator-777779d784-rbghq\" (UID: \"bb386b0e-b615-4901-b3aa-0c947a397281\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.767154 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3ed04786-6dd4-418a-a077-534b7e26fdab-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wb7fj\" (UID: \"3ed04786-6dd4-418a-a077-534b7e26fdab\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.767535 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb386b0e-b615-4901-b3aa-0c947a397281-config\") pod \"service-ca-operator-777779d784-rbghq\" (UID: \"bb386b0e-b615-4901-b3aa-0c947a397281\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.767909 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc05a058-b682-4b99-8eef-ba7c1acf1782-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cxj7k\" (UID: \"bc05a058-b682-4b99-8eef-ba7c1acf1782\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.767927 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/85df889f-53b1-41d3-a6fa-9a05019ef05b-auth-proxy-config\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.768987 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-trusted-ca\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.769077 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-policies\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.769479 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/c695455e-b05b-4406-9471-b23af6eef3a1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.770344 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c9c6b446-7eb9-402d-b1c2-11e00895054a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-9b2sw\" (UID: \"c9c6b446-7eb9-402d-b1c2-11e00895054a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.770516 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19840c29-1e2b-499f-8a15-714083bd64a2-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kwg9g\" (UID: \"19840c29-1e2b-499f-8a15-714083bd64a2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.770470 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-proxy-tls\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.770708 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/97427249-89c5-4e96-850e-0af8f860865a-signing-cabundle\") pod \"service-ca-9c57cc56f-l9hbv\" (UID: \"97427249-89c5-4e96-850e-0af8f860865a\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.770741 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.770733 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.771480 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.771527 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-dir\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772036 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbs89\" (UniqueName: \"kubernetes.io/projected/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-kube-api-access-wbs89\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772071 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: 
I0606 09:13:47.772135 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772161 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e3c39a95-76c2-42b7-a8e8-e1379e1236bf-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-pld6r\" (UID: \"e3c39a95-76c2-42b7-a8e8-e1379e1236bf\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772185 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/79fabc0e-2c89-4889-9a07-afa5306b20f8-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-fp5lv\" (UID: \"79fabc0e-2c89-4889-9a07-afa5306b20f8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772212 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4rrn\" (UniqueName: \"kubernetes.io/projected/9f95470f-d456-468e-83a7-e72420631662-kube-api-access-c4rrn\") pod \"package-server-manager-789f6589d5-tgj9w\" (UID: \"9f95470f-d456-468e-83a7-e72420631662\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772233 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b2220da6-a793-45c1-923b-36bda543a176-srv-cert\") pod \"olm-operator-6b444d44fb-8tqzh\" (UID: \"b2220da6-a793-45c1-923b-36bda543a176\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772254 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16222bda-92f4-483e-8fa8-6e701085186f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bt8wp\" (UID: \"16222bda-92f4-483e-8fa8-6e701085186f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772272 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5852e56b-1121-4a77-a104-225400bdf58b-srv-cert\") pod \"catalog-operator-68c6474976-99c44\" (UID: \"5852e56b-1121-4a77-a104-225400bdf58b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772338 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/73326677-9e72-46c9-8dad-c0bfe47d599f-apiservice-cert\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 
09:13:47.772367 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ea53c70-3632-45b8-a47f-16c6f3bc565a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xtd6h\" (UID: \"0ea53c70-3632-45b8-a47f-16c6f3bc565a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772395 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-64hwv\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772421 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5852e56b-1121-4a77-a104-225400bdf58b-profile-collector-cert\") pod \"catalog-operator-68c6474976-99c44\" (UID: \"5852e56b-1121-4a77-a104-225400bdf58b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772788 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53b9bdc5-cb4e-4ee1-be2f-d58d0c554614-config-volume\") pod \"dns-default-xj4b2\" (UID: \"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614\") " pod="openshift-dns/dns-default-xj4b2" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772821 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36152524-008c-4ae4-9b35-70aceacddd34-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s9cdv\" (UID: \"36152524-008c-4ae4-9b35-70aceacddd34\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772864 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/79fabc0e-2c89-4889-9a07-afa5306b20f8-proxy-tls\") pod \"machine-config-controller-84d6567774-fp5lv\" (UID: \"79fabc0e-2c89-4889-9a07-afa5306b20f8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772889 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-csi-data-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772918 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/31704392-a75e-4c29-b905-778ae0b34fb6-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772940 
4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b2220da6-a793-45c1-923b-36bda543a176-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8tqzh\" (UID: \"b2220da6-a793-45c1-923b-36bda543a176\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.772968 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.773147 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.773342 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.773635 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-metrics-certs\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.773708 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-images\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.773876 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19840c29-1e2b-499f-8a15-714083bd64a2-config\") pod \"kube-apiserver-operator-766d6c64bb-kwg9g\" (UID: \"19840c29-1e2b-499f-8a15-714083bd64a2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.774277 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-etcd-service-ca\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.774344 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-bound-sa-token\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.774385 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ed04786-6dd4-418a-a077-534b7e26fdab-serving-cert\") pod \"openshift-config-operator-7777fb866f-wb7fj\" (UID: 
\"3ed04786-6dd4-418a-a077-534b7e26fdab\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.774484 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.774552 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ea53c70-3632-45b8-a47f-16c6f3bc565a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xtd6h\" (UID: \"0ea53c70-3632-45b8-a47f-16c6f3bc565a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.774575 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqslq\" (UniqueName: \"kubernetes.io/projected/3ed04786-6dd4-418a-a077-534b7e26fdab-kube-api-access-lqslq\") pod \"openshift-config-operator-7777fb866f-wb7fj\" (UID: \"3ed04786-6dd4-418a-a077-534b7e26fdab\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.774599 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2b97\" (UniqueName: \"kubernetes.io/projected/79fabc0e-2c89-4889-9a07-afa5306b20f8-kube-api-access-r2b97\") pod \"machine-config-controller-84d6567774-fp5lv\" (UID: \"79fabc0e-2c89-4889-9a07-afa5306b20f8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.774696 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4ns8\" (UniqueName: \"kubernetes.io/projected/02e23293-e9f2-4e5e-9aae-bdeb17c17823-kube-api-access-b4ns8\") pod \"ingress-canary-dlswd\" (UID: \"02e23293-e9f2-4e5e-9aae-bdeb17c17823\") " pod="openshift-ingress-canary/ingress-canary-dlswd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.774942 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/79fabc0e-2c89-4889-9a07-afa5306b20f8-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-fp5lv\" (UID: \"79fabc0e-2c89-4889-9a07-afa5306b20f8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" Jun 06 09:13:47 crc kubenswrapper[4911]: E0606 09:13:47.775654 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.275613006 +0000 UTC m=+39.551038549 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.775932 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/85df889f-53b1-41d3-a6fa-9a05019ef05b-machine-approver-tls\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.776317 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16222bda-92f4-483e-8fa8-6e701085186f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bt8wp\" (UID: \"16222bda-92f4-483e-8fa8-6e701085186f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.777002 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ea53c70-3632-45b8-a47f-16c6f3bc565a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xtd6h\" (UID: \"0ea53c70-3632-45b8-a47f-16c6f3bc565a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.777207 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.779002 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/97427249-89c5-4e96-850e-0af8f860865a-signing-key\") pod \"service-ca-9c57cc56f-l9hbv\" (UID: \"97427249-89c5-4e96-850e-0af8f860865a\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.779500 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.780550 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-serving-cert\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.782679 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/16222bda-92f4-483e-8fa8-6e701085186f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bt8wp\" (UID: \"16222bda-92f4-483e-8fa8-6e701085186f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.782826 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-default-certificate\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.783195 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.785424 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ed04786-6dd4-418a-a077-534b7e26fdab-serving-cert\") pod \"openshift-config-operator-7777fb866f-wb7fj\" (UID: \"3ed04786-6dd4-418a-a077-534b7e26fdab\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.785431 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.786158 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-stats-auth\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.786587 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-tls\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.787049 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/79fabc0e-2c89-4889-9a07-afa5306b20f8-proxy-tls\") pod \"machine-config-controller-84d6567774-fp5lv\" (UID: \"79fabc0e-2c89-4889-9a07-afa5306b20f8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.787304 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e3c39a95-76c2-42b7-a8e8-e1379e1236bf-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-pld6r\" (UID: \"e3c39a95-76c2-42b7-a8e8-e1379e1236bf\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.791310 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r56ns"] Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.799626 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7f2h\" (UniqueName: \"kubernetes.io/projected/87f30b86-0303-493c-8919-e37e07f71709-kube-api-access-f7f2h\") pod \"downloads-7954f5f757-ftvnt\" (UID: \"87f30b86-0303-493c-8919-e37e07f71709\") " pod="openshift-console/downloads-7954f5f757-ftvnt" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.799718 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p5p9k"] Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.801311 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp"] Jun 06 09:13:47 crc kubenswrapper[4911]: W0606 09:13:47.808120 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22e821a6_f095_4c6e_ac9e_8484e31bd21e.slice/crio-d8d74e0e7be335715aecae1498181cd8d8c8eea72ad6ee5f0a945dd8956f9270 WatchSource:0}: Error finding container d8d74e0e7be335715aecae1498181cd8d8c8eea72ad6ee5f0a945dd8956f9270: Status 404 returned error can't find the container with id d8d74e0e7be335715aecae1498181cd8d8c8eea72ad6ee5f0a945dd8956f9270 Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.808315 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19840c29-1e2b-499f-8a15-714083bd64a2-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kwg9g\" (UID: \"19840c29-1e2b-499f-8a15-714083bd64a2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.836847 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2666\" (UniqueName: \"kubernetes.io/projected/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-kube-api-access-r2666\") pod \"oauth-openshift-558db77b4-z2z69\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.846427 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb"] Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.846983 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgp9l\" (UniqueName: \"kubernetes.io/projected/bb386b0e-b615-4901-b3aa-0c947a397281-kube-api-access-pgp9l\") pod \"service-ca-operator-777779d784-rbghq\" (UID: \"bb386b0e-b615-4901-b3aa-0c947a397281\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.864032 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.865976 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rqvnb"] Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.867272 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-4dcwp"] Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.868316 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c"] Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875572 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875622 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-mountpoint-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875653 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbd2m\" (UniqueName: \"kubernetes.io/projected/acf3bc54-04c0-416f-bf4a-541244ac0074-kube-api-access-lbd2m\") pod \"control-plane-machine-set-operator-78cbb6b69f-h4q7d\" (UID: \"acf3bc54-04c0-416f-bf4a-541244ac0074\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875691 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqf78\" (UniqueName: \"kubernetes.io/projected/5852e56b-1121-4a77-a104-225400bdf58b-kube-api-access-tqf78\") pod \"catalog-operator-68c6474976-99c44\" (UID: \"5852e56b-1121-4a77-a104-225400bdf58b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875708 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/73326677-9e72-46c9-8dad-c0bfe47d599f-tmpfs\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875734 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qp5lf\" (UniqueName: \"kubernetes.io/projected/f2018465-ff54-4f01-aef9-a87a2973d419-kube-api-access-qp5lf\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875765 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-registration-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 
09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875781 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-plugins-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875799 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36152524-008c-4ae4-9b35-70aceacddd34-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s9cdv\" (UID: \"36152524-008c-4ae4-9b35-70aceacddd34\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875817 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-config-volume\") pod \"collect-profiles-29153340-7mt4c\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875843 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/843168d2-cb7b-42e3-bfe9-63e012c28428-certs\") pod \"machine-config-server-6jl5k\" (UID: \"843168d2-cb7b-42e3-bfe9-63e012c28428\") " pod="openshift-machine-config-operator/machine-config-server-6jl5k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875864 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wg6tr\" (UniqueName: \"kubernetes.io/projected/31704392-a75e-4c29-b905-778ae0b34fb6-kube-api-access-wg6tr\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875889 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtcfg\" (UniqueName: \"kubernetes.io/projected/b2220da6-a793-45c1-923b-36bda543a176-kube-api-access-jtcfg\") pod \"olm-operator-6b444d44fb-8tqzh\" (UID: \"b2220da6-a793-45c1-923b-36bda543a176\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875913 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/843168d2-cb7b-42e3-bfe9-63e012c28428-node-bootstrap-token\") pod \"machine-config-server-6jl5k\" (UID: \"843168d2-cb7b-42e3-bfe9-63e012c28428\") " pod="openshift-machine-config-operator/machine-config-server-6jl5k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875933 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4rrn\" (UniqueName: \"kubernetes.io/projected/9f95470f-d456-468e-83a7-e72420631662-kube-api-access-c4rrn\") pod \"package-server-manager-789f6589d5-tgj9w\" (UID: \"9f95470f-d456-468e-83a7-e72420631662\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875952 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"srv-cert\" (UniqueName: \"kubernetes.io/secret/b2220da6-a793-45c1-923b-36bda543a176-srv-cert\") pod \"olm-operator-6b444d44fb-8tqzh\" (UID: \"b2220da6-a793-45c1-923b-36bda543a176\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875968 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/73326677-9e72-46c9-8dad-c0bfe47d599f-apiservice-cert\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.875986 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5852e56b-1121-4a77-a104-225400bdf58b-srv-cert\") pod \"catalog-operator-68c6474976-99c44\" (UID: \"5852e56b-1121-4a77-a104-225400bdf58b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876005 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-64hwv\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876022 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5852e56b-1121-4a77-a104-225400bdf58b-profile-collector-cert\") pod \"catalog-operator-68c6474976-99c44\" (UID: \"5852e56b-1121-4a77-a104-225400bdf58b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876041 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53b9bdc5-cb4e-4ee1-be2f-d58d0c554614-config-volume\") pod \"dns-default-xj4b2\" (UID: \"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614\") " pod="openshift-dns/dns-default-xj4b2" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876058 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36152524-008c-4ae4-9b35-70aceacddd34-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s9cdv\" (UID: \"36152524-008c-4ae4-9b35-70aceacddd34\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876078 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/31704392-a75e-4c29-b905-778ae0b34fb6-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876110 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b2220da6-a793-45c1-923b-36bda543a176-profile-collector-cert\") pod 
\"olm-operator-6b444d44fb-8tqzh\" (UID: \"b2220da6-a793-45c1-923b-36bda543a176\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876130 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-csi-data-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876171 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4ns8\" (UniqueName: \"kubernetes.io/projected/02e23293-e9f2-4e5e-9aae-bdeb17c17823-kube-api-access-b4ns8\") pod \"ingress-canary-dlswd\" (UID: \"02e23293-e9f2-4e5e-9aae-bdeb17c17823\") " pod="openshift-ingress-canary/ingress-canary-dlswd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876222 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876243 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f95470f-d456-468e-83a7-e72420631662-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-tgj9w\" (UID: \"9f95470f-d456-468e-83a7-e72420631662\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876261 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k82wx\" (UniqueName: \"kubernetes.io/projected/2654cb62-4a89-4c50-ab33-27c9273b7e82-kube-api-access-k82wx\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876279 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/02e23293-e9f2-4e5e-9aae-bdeb17c17823-cert\") pod \"ingress-canary-dlswd\" (UID: \"02e23293-e9f2-4e5e-9aae-bdeb17c17823\") " pod="openshift-ingress-canary/ingress-canary-dlswd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876297 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/acf3bc54-04c0-416f-bf4a-541244ac0074-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-h4q7d\" (UID: \"acf3bc54-04c0-416f-bf4a-541244ac0074\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876317 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-64hwv\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876333 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2018465-ff54-4f01-aef9-a87a2973d419-trusted-ca\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876350 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-ready\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876371 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f2018465-ff54-4f01-aef9-a87a2973d419-metrics-tls\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876388 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/73326677-9e72-46c9-8dad-c0bfe47d599f-webhook-cert\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876414 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppg74\" (UniqueName: \"kubernetes.io/projected/843168d2-cb7b-42e3-bfe9-63e012c28428-kube-api-access-ppg74\") pod \"machine-config-server-6jl5k\" (UID: \"843168d2-cb7b-42e3-bfe9-63e012c28428\") " pod="openshift-machine-config-operator/machine-config-server-6jl5k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876434 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb4tz\" (UniqueName: \"kubernetes.io/projected/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-kube-api-access-vb4tz\") pod \"collect-profiles-29153340-7mt4c\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876451 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31704392-a75e-4c29-b905-778ae0b34fb6-serving-cert\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876471 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njbq7\" (UniqueName: \"kubernetes.io/projected/36152524-008c-4ae4-9b35-70aceacddd34-kube-api-access-njbq7\") pod \"kube-storage-version-migrator-operator-b67b599dd-s9cdv\" (UID: \"36152524-008c-4ae4-9b35-70aceacddd34\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876489 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/53b9bdc5-cb4e-4ee1-be2f-d58d0c554614-metrics-tls\") pod \"dns-default-xj4b2\" (UID: \"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614\") " pod="openshift-dns/dns-default-xj4b2" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876506 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-socket-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876536 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/31704392-a75e-4c29-b905-778ae0b34fb6-service-ca-bundle\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876555 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ct48\" (UniqueName: \"kubernetes.io/projected/73326677-9e72-46c9-8dad-c0bfe47d599f-kube-api-access-2ct48\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876573 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876601 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqpww\" (UniqueName: \"kubernetes.io/projected/573af29b-3e41-4b58-aec9-8bbfe7845920-kube-api-access-bqpww\") pod \"marketplace-operator-79b997595-64hwv\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876618 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bffj2\" (UniqueName: \"kubernetes.io/projected/53b9bdc5-cb4e-4ee1-be2f-d58d0c554614-kube-api-access-bffj2\") pod \"dns-default-xj4b2\" (UID: \"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614\") " pod="openshift-dns/dns-default-xj4b2" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876639 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4xhs\" (UniqueName: \"kubernetes.io/projected/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-kube-api-access-l4xhs\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876640 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0ea53c70-3632-45b8-a47f-16c6f3bc565a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xtd6h\" (UID: \"0ea53c70-3632-45b8-a47f-16c6f3bc565a\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876669 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-secret-volume\") pod \"collect-profiles-29153340-7mt4c\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876763 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36152524-008c-4ae4-9b35-70aceacddd34-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s9cdv\" (UID: \"36152524-008c-4ae4-9b35-70aceacddd34\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876864 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2018465-ff54-4f01-aef9-a87a2973d419-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.876908 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31704392-a75e-4c29-b905-778ae0b34fb6-config\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.877081 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-registration-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.877154 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-plugins-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.877174 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.877192 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-mountpoint-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.877337 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/73326677-9e72-46c9-8dad-c0bfe47d599f-tmpfs\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.877624 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31704392-a75e-4c29-b905-778ae0b34fb6-config\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.878389 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-socket-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.878925 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2654cb62-4a89-4c50-ab33-27c9273b7e82-csi-data-dir\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.878934 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.879334 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-ready\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.879548 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53b9bdc5-cb4e-4ee1-be2f-d58d0c554614-config-volume\") pod \"dns-default-xj4b2\" (UID: \"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614\") " pod="openshift-dns/dns-default-xj4b2" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.880006 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/31704392-a75e-4c29-b905-778ae0b34fb6-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.880318 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2018465-ff54-4f01-aef9-a87a2973d419-trusted-ca\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.880673 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-config-volume\") pod \"collect-profiles-29153340-7mt4c\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.880727 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/31704392-a75e-4c29-b905-778ae0b34fb6-service-ca-bundle\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.880691 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f2018465-ff54-4f01-aef9-a87a2973d419-metrics-tls\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.880855 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-64hwv\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:13:47 crc kubenswrapper[4911]: E0606 09:13:47.881213 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.38119663 +0000 UTC m=+39.656622173 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.884376 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/53b9bdc5-cb4e-4ee1-be2f-d58d0c554614-metrics-tls\") pod \"dns-default-xj4b2\" (UID: \"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614\") " pod="openshift-dns/dns-default-xj4b2" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.884411 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-64hwv\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.884685 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5852e56b-1121-4a77-a104-225400bdf58b-profile-collector-cert\") pod \"catalog-operator-68c6474976-99c44\" (UID: \"5852e56b-1121-4a77-a104-225400bdf58b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.885847 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36152524-008c-4ae4-9b35-70aceacddd34-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s9cdv\" (UID: \"36152524-008c-4ae4-9b35-70aceacddd34\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.886109 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/843168d2-cb7b-42e3-bfe9-63e012c28428-certs\") pod \"machine-config-server-6jl5k\" (UID: \"843168d2-cb7b-42e3-bfe9-63e012c28428\") " pod="openshift-machine-config-operator/machine-config-server-6jl5k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.886271 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b2220da6-a793-45c1-923b-36bda543a176-srv-cert\") pod \"olm-operator-6b444d44fb-8tqzh\" (UID: \"b2220da6-a793-45c1-923b-36bda543a176\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.886498 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b2220da6-a793-45c1-923b-36bda543a176-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8tqzh\" (UID: \"b2220da6-a793-45c1-923b-36bda543a176\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.886844 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/9f95470f-d456-468e-83a7-e72420631662-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-tgj9w\" (UID: \"9f95470f-d456-468e-83a7-e72420631662\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.888200 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31704392-a75e-4c29-b905-778ae0b34fb6-serving-cert\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.889328 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/843168d2-cb7b-42e3-bfe9-63e012c28428-node-bootstrap-token\") pod \"machine-config-server-6jl5k\" (UID: \"843168d2-cb7b-42e3-bfe9-63e012c28428\") " pod="openshift-machine-config-operator/machine-config-server-6jl5k" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.889746 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/73326677-9e72-46c9-8dad-c0bfe47d599f-webhook-cert\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.889903 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/73326677-9e72-46c9-8dad-c0bfe47d599f-apiservice-cert\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.890317 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-secret-volume\") pod \"collect-profiles-29153340-7mt4c\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.890414 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/02e23293-e9f2-4e5e-9aae-bdeb17c17823-cert\") pod \"ingress-canary-dlswd\" (UID: \"02e23293-e9f2-4e5e-9aae-bdeb17c17823\") " pod="openshift-ingress-canary/ingress-canary-dlswd" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.890422 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5852e56b-1121-4a77-a104-225400bdf58b-srv-cert\") pod \"catalog-operator-68c6474976-99c44\" (UID: \"5852e56b-1121-4a77-a104-225400bdf58b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.898156 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/acf3bc54-04c0-416f-bf4a-541244ac0074-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-h4q7d\" (UID: \"acf3bc54-04c0-416f-bf4a-541244ac0074\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.898155 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.911733 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.915740 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6h9xp\" (UniqueName: \"kubernetes.io/projected/85df889f-53b1-41d3-a6fa-9a05019ef05b-kube-api-access-6h9xp\") pod \"machine-approver-56656f9798-46t9l\" (UID: \"85df889f-53b1-41d3-a6fa-9a05019ef05b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.931876 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4l7d\" (UniqueName: \"kubernetes.io/projected/b1afd53c-ecea-46b7-ae7f-9313ab7b9a02-kube-api-access-t4l7d\") pod \"etcd-operator-b45778765-wdk2h\" (UID: \"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.955131 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrtdp\" (UniqueName: \"kubernetes.io/projected/c9c6b446-7eb9-402d-b1c2-11e00895054a-kube-api-access-xrtdp\") pod \"cluster-samples-operator-665b6dd947-9b2sw\" (UID: \"c9c6b446-7eb9-402d-b1c2-11e00895054a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.976086 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xgmr\" (UniqueName: \"kubernetes.io/projected/c695455e-b05b-4406-9471-b23af6eef3a1-kube-api-access-5xgmr\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.978802 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:47 crc kubenswrapper[4911]: E0606 09:13:47.978972 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.478942316 +0000 UTC m=+39.754367859 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.979414 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:47 crc kubenswrapper[4911]: E0606 09:13:47.979852 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.479841737 +0000 UTC m=+39.755267280 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:47 crc kubenswrapper[4911]: I0606 09:13:47.991428 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbph6\" (UniqueName: \"kubernetes.io/projected/97427249-89c5-4e96-850e-0af8f860865a-kube-api-access-fbph6\") pod \"service-ca-9c57cc56f-l9hbv\" (UID: \"97427249-89c5-4e96-850e-0af8f860865a\") " pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.013146 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6d7jh\" (UniqueName: \"kubernetes.io/projected/49294ad0-2f04-4c24-b5b5-974c2b3e5259-kube-api-access-6d7jh\") pod \"migrator-59844c95c7-mptr9\" (UID: \"49294ad0-2f04-4c24-b5b5-974c2b3e5259\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.027828 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvgmt\" (UniqueName: \"kubernetes.io/projected/d6a72352-dbe3-43f1-aae4-c3a6bb160ff1-kube-api-access-pvgmt\") pod \"router-default-5444994796-bn7hn\" (UID: \"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1\") " pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.046067 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.052312 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8b4p\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-kube-api-access-c8b4p\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.059219 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-fb9n5"] Jun 06 09:13:48 crc kubenswrapper[4911]: W0606 09:13:48.061871 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-95f5762d78350e994de0b2de2ccd411af8c8885db5e4b796bd477f05fee9f7ea WatchSource:0}: Error finding container 95f5762d78350e994de0b2de2ccd411af8c8885db5e4b796bd477f05fee9f7ea: Status 404 returned error can't find the container with id 95f5762d78350e994de0b2de2ccd411af8c8885db5e4b796bd477f05fee9f7ea Jun 06 09:13:48 crc kubenswrapper[4911]: W0606 09:13:48.066442 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5502a710_ac52_4347_8c1f_095219735356.slice/crio-f5a2ebee6eda0f58382c63fc5bef6fecf5501c558ede63c866b5e24e66fa6d3e WatchSource:0}: Error finding container f5a2ebee6eda0f58382c63fc5bef6fecf5501c558ede63c866b5e24e66fa6d3e: Status 404 returned error can't find the container with id f5a2ebee6eda0f58382c63fc5bef6fecf5501c558ede63c866b5e24e66fa6d3e Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.068752 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2zsn\" (UniqueName: \"kubernetes.io/projected/e3c39a95-76c2-42b7-a8e8-e1379e1236bf-kube-api-access-t2zsn\") pod \"multus-admission-controller-857f4d67dd-pld6r\" (UID: \"e3c39a95-76c2-42b7-a8e8-e1379e1236bf\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.078379 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-ftvnt" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.081316 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.081627 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.581580983 +0000 UTC m=+39.857006536 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.082897 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.083526 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.583503397 +0000 UTC m=+39.858928940 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.093173 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7tvz\" (UniqueName: \"kubernetes.io/projected/16222bda-92f4-483e-8fa8-6e701085186f-kube-api-access-k7tvz\") pod \"openshift-controller-manager-operator-756b6f6bc6-bt8wp\" (UID: \"16222bda-92f4-483e-8fa8-6e701085186f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.106641 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.113193 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc05a058-b682-4b99-8eef-ba7c1acf1782-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cxj7k\" (UID: \"bc05a058-b682-4b99-8eef-ba7c1acf1782\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.115801 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.134568 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbs89\" (UniqueName: \"kubernetes.io/projected/cea8e043-ab75-4ad6-aa10-8210fb5ee7a1-kube-api-access-wbs89\") pod \"machine-config-operator-74547568cd-fsrd5\" (UID: \"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.150931 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c695455e-b05b-4406-9471-b23af6eef3a1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-c2x4t\" (UID: \"c695455e-b05b-4406-9471-b23af6eef3a1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.162830 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.168142 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.172001 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-bound-sa-token\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.183710 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.184744 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.184902 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.684875414 +0000 UTC m=+39.960300957 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.185515 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.185859 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.685844296 +0000 UTC m=+39.961269839 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.189940 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.195748 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2b97\" (UniqueName: \"kubernetes.io/projected/79fabc0e-2c89-4889-9a07-afa5306b20f8-kube-api-access-r2b97\") pod \"machine-config-controller-84d6567774-fp5lv\" (UID: \"79fabc0e-2c89-4889-9a07-afa5306b20f8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.205670 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.208397 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqslq\" (UniqueName: \"kubernetes.io/projected/3ed04786-6dd4-418a-a077-534b7e26fdab-kube-api-access-lqslq\") pod \"openshift-config-operator-7777fb866f-wb7fj\" (UID: \"3ed04786-6dd4-418a-a077-534b7e26fdab\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.230697 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.234898 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.256957 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqf78\" (UniqueName: \"kubernetes.io/projected/5852e56b-1121-4a77-a104-225400bdf58b-kube-api-access-tqf78\") pod \"catalog-operator-68c6474976-99c44\" (UID: \"5852e56b-1121-4a77-a104-225400bdf58b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.273866 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qp5lf\" (UniqueName: \"kubernetes.io/projected/f2018465-ff54-4f01-aef9-a87a2973d419-kube-api-access-qp5lf\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.286899 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.287614 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.787545822 +0000 UTC m=+40.062971505 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.297245 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbd2m\" (UniqueName: \"kubernetes.io/projected/acf3bc54-04c0-416f-bf4a-541244ac0074-kube-api-access-lbd2m\") pod \"control-plane-machine-set-operator-78cbb6b69f-h4q7d\" (UID: \"acf3bc54-04c0-416f-bf4a-541244ac0074\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.321441 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njbq7\" (UniqueName: \"kubernetes.io/projected/36152524-008c-4ae4-9b35-70aceacddd34-kube-api-access-njbq7\") pod \"kube-storage-version-migrator-operator-b67b599dd-s9cdv\" (UID: \"36152524-008c-4ae4-9b35-70aceacddd34\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.323758 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.325290 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rbghq"] Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.334950 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqpww\" (UniqueName: \"kubernetes.io/projected/573af29b-3e41-4b58-aec9-8bbfe7845920-kube-api-access-bqpww\") pod \"marketplace-operator-79b997595-64hwv\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.339702 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.345544 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-5zv54" event={"ID":"2b95e2aa-04fb-40c7-b729-da37e9cc2745","Type":"ContainerStarted","Data":"b3c1d178bbb740271594d0068cae6b4552441ac2fd9a9ee3412b87ecf4513bfe"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.345594 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-5zv54" event={"ID":"2b95e2aa-04fb-40c7-b729-da37e9cc2745","Type":"ContainerStarted","Data":"21c45d0a6f0eb2f1899a2b9b22408225472a2b14ef016bbed46a94caae417238"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.346167 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.346628 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" event={"ID":"5502a710-ac52-4347-8c1f-095219735356","Type":"ContainerStarted","Data":"f5a2ebee6eda0f58382c63fc5bef6fecf5501c558ede63c866b5e24e66fa6d3e"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.347853 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" event={"ID":"9999ca00-de82-4451-bfe9-c216be6edd43","Type":"ContainerStarted","Data":"7b9c8cf9dfe9ea2094b74f461fa392da2cd553b950ec9c809ff56f49cda9bfa8"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.349280 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4xhs\" (UniqueName: \"kubernetes.io/projected/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-kube-api-access-l4xhs\") pod \"cni-sysctl-allowlist-ds-8pgq8\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.349722 4911 patch_prober.go:28] interesting pod/console-operator-58897d9998-5zv54 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.349761 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-5zv54" podUID="2b95e2aa-04fb-40c7-b729-da37e9cc2745" containerName="console-operator" 
probeResult="failure" output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.350170 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.354789 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.358686 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" event={"ID":"22e821a6-f095-4c6e-ac9e-8484e31bd21e","Type":"ContainerStarted","Data":"7bf2b662eb3d34a3aab85b6cf0b6b00da3b2c136878e53bd830f11390a72e7b4"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.358728 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" event={"ID":"22e821a6-f095-4c6e-ac9e-8484e31bd21e","Type":"ContainerStarted","Data":"d8d74e0e7be335715aecae1498181cd8d8c8eea72ad6ee5f0a945dd8956f9270"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.363624 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.363995 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" event={"ID":"b72e3dc5-6506-4f38-bd35-23abe3a44764","Type":"ContainerStarted","Data":"bb155d533c19d2d84e0700f84346c61f0c91b28ae906f108b4bed1d92cd6f64e"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.365691 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" event={"ID":"d05fca62-157f-4111-966b-0b1bad77fc76","Type":"ContainerStarted","Data":"f503237530e5791a90c9b973b21e53cb6dcd5d63f75faef13621de2adb815859"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.367938 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bffj2\" (UniqueName: \"kubernetes.io/projected/53b9bdc5-cb4e-4ee1-be2f-d58d0c554614-kube-api-access-bffj2\") pod \"dns-default-xj4b2\" (UID: \"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614\") " pod="openshift-dns/dns-default-xj4b2" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.369336 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" event={"ID":"99e87a63-ddac-4e72-9f32-aff82d073d08","Type":"ContainerStarted","Data":"bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.369364 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" event={"ID":"99e87a63-ddac-4e72-9f32-aff82d073d08","Type":"ContainerStarted","Data":"4d7fbef068f0156b6818e0092184cefd8ba13748e77c3eb184a8e5820c38fc35"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.370074 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.372726 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g"] Jun 06 09:13:48 crc 
kubenswrapper[4911]: I0606 09:13:48.373025 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c922d45a73012f7a76ec77d57313aedcbb70124c164ec62800a9f4f621f03c69"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.374651 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" event={"ID":"6c5c9d31-1572-4801-abb9-4cfdf49d1986","Type":"ContainerStarted","Data":"3b59ef53a589d471f38cba22440399ce433e062ea4772db2baf67d13dea35883"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.374682 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" event={"ID":"6c5c9d31-1572-4801-abb9-4cfdf49d1986","Type":"ContainerStarted","Data":"d1dcbb2455b56e8d1d656455e512ae60157f5cb2fb669dd95de43f29bde27701"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.376297 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-4dcwp" event={"ID":"69127e92-f707-4b41-a690-9fd917998557","Type":"ContainerStarted","Data":"6b4d55365de2ddd5d4f4f944202c0e141b3bb15a2f673d0894dd997aca5408fd"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.376420 4911 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-p5p9k container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" start-of-body= Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.376458 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" podUID="99e87a63-ddac-4e72-9f32-aff82d073d08" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.378988 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"95f5762d78350e994de0b2de2ccd411af8c8885db5e4b796bd477f05fee9f7ea"} Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.388121 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.388605 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.888552562 +0000 UTC m=+40.163978105 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.394428 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.403420 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2018465-ff54-4f01-aef9-a87a2973d419-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jtrxq\" (UID: \"f2018465-ff54-4f01-aef9-a87a2973d419\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.418532 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppg74\" (UniqueName: \"kubernetes.io/projected/843168d2-cb7b-42e3-bfe9-63e012c28428-kube-api-access-ppg74\") pod \"machine-config-server-6jl5k\" (UID: \"843168d2-cb7b-42e3-bfe9-63e012c28428\") " pod="openshift-machine-config-operator/machine-config-server-6jl5k" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.437841 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ct48\" (UniqueName: \"kubernetes.io/projected/73326677-9e72-46c9-8dad-c0bfe47d599f-kube-api-access-2ct48\") pod \"packageserver-d55dfcdfc-r4hl5\" (UID: \"73326677-9e72-46c9-8dad-c0bfe47d599f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.465260 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vb4tz\" (UniqueName: \"kubernetes.io/projected/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-kube-api-access-vb4tz\") pod \"collect-profiles-29153340-7mt4c\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.475548 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.479668 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4ns8\" (UniqueName: \"kubernetes.io/projected/02e23293-e9f2-4e5e-9aae-bdeb17c17823-kube-api-access-b4ns8\") pod \"ingress-canary-dlswd\" (UID: \"02e23293-e9f2-4e5e-9aae-bdeb17c17823\") " pod="openshift-ingress-canary/ingress-canary-dlswd" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.489777 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.490235 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:48.990192587 +0000 UTC m=+40.265618130 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.490369 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.501662 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw"] Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.503553 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.003533398 +0000 UTC m=+40.278958941 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.510284 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k82wx\" (UniqueName: \"kubernetes.io/projected/2654cb62-4a89-4c50-ab33-27c9273b7e82-kube-api-access-k82wx\") pod \"csi-hostpathplugin-5t4fd\" (UID: \"2654cb62-4a89-4c50-ab33-27c9273b7e82\") " pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.520582 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wg6tr\" (UniqueName: \"kubernetes.io/projected/31704392-a75e-4c29-b905-778ae0b34fb6-kube-api-access-wg6tr\") pod \"authentication-operator-69f744f599-fghbg\" (UID: \"31704392-a75e-4c29-b905-778ae0b34fb6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.542140 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.544602 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtcfg\" (UniqueName: \"kubernetes.io/projected/b2220da6-a793-45c1-923b-36bda543a176-kube-api-access-jtcfg\") pod \"olm-operator-6b444d44fb-8tqzh\" (UID: \"b2220da6-a793-45c1-923b-36bda543a176\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.546789 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.553251 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.561909 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4rrn\" (UniqueName: \"kubernetes.io/projected/9f95470f-d456-468e-83a7-e72420631662-kube-api-access-c4rrn\") pod \"package-server-manager-789f6589d5-tgj9w\" (UID: \"9f95470f-d456-468e-83a7-e72420631662\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.562224 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.567713 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6jl5k" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.579304 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-z2z69"] Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.584242 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.590406 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.591548 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.591988 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.091944844 +0000 UTC m=+40.367370387 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.598161 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.605620 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.614743 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.619296 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wdk2h"] Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.621649 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-ftvnt"] Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.633862 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-dlswd" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.645019 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-xj4b2" Jun 06 09:13:48 crc kubenswrapper[4911]: W0606 09:13:48.663043 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6a72352_dbe3_43f1_aae4_c3a6bb160ff1.slice/crio-b1021e5b60f15ff8ae28d0b7e78a38094ecca1a6e97163a39ec9c1f992ae66cb WatchSource:0}: Error finding container b1021e5b60f15ff8ae28d0b7e78a38094ecca1a6e97163a39ec9c1f992ae66cb: Status 404 returned error can't find the container with id b1021e5b60f15ff8ae28d0b7e78a38094ecca1a6e97163a39ec9c1f992ae66cb Jun 06 09:13:48 crc kubenswrapper[4911]: W0606 09:13:48.669773 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85df889f_53b1_41d3_a6fa_9a05019ef05b.slice/crio-dd71c2f686d105e18ec5e4db052878f779b6326af7ee9f7512185ecc530519f4 WatchSource:0}: Error finding container dd71c2f686d105e18ec5e4db052878f779b6326af7ee9f7512185ecc530519f4: Status 404 returned error can't find the container with id dd71c2f686d105e18ec5e4db052878f779b6326af7ee9f7512185ecc530519f4 Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.690210 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.694299 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.695279 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.195259746 +0000 UTC m=+40.470685289 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.708955 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-pld6r"] Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.761706 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l9hbv"] Jun 06 09:13:48 crc kubenswrapper[4911]: W0606 09:13:48.773201 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4ae50abd_4f8f_495f_8d6e_34f8b2b19711.slice/crio-548fc523b7628bb1e56761dd1527c5a2271570df886538e1584efa1cf38a2de6 WatchSource:0}: Error finding container 548fc523b7628bb1e56761dd1527c5a2271570df886538e1584efa1cf38a2de6: Status 404 returned error can't find the container with id 548fc523b7628bb1e56761dd1527c5a2271570df886538e1584efa1cf38a2de6 Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.795520 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.795838 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.295820046 +0000 UTC m=+40.571245589 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.830650 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" podStartSLOduration=17.830618172 podStartE2EDuration="17.830618172s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:48.826903788 +0000 UTC m=+40.102329331" watchObservedRunningTime="2025-06-06 09:13:48.830618172 +0000 UTC m=+40.106043715" Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.875082 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9"] Jun 06 09:13:48 crc kubenswrapper[4911]: W0606 09:13:48.883536 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3c39a95_76c2_42b7_a8e8_e1379e1236bf.slice/crio-31f4a1467103e3c1a66ace341f3aebe12c9d29ea5df0865d930c8519d503c2ac WatchSource:0}: Error finding container 31f4a1467103e3c1a66ace341f3aebe12c9d29ea5df0865d930c8519d503c2ac: Status 404 returned error can't find the container with id 31f4a1467103e3c1a66ace341f3aebe12c9d29ea5df0865d930c8519d503c2ac Jun 06 09:13:48 crc kubenswrapper[4911]: W0606 09:13:48.885895 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d7622e9_e3cd_4bc9_921b_c5d7ef47f515.slice/crio-a17ac789870e1143711506905d9ed79525639f23ed2c7d7834688d1a9a8854c4 WatchSource:0}: Error finding container a17ac789870e1143711506905d9ed79525639f23ed2c7d7834688d1a9a8854c4: Status 404 returned error can't find the container with id a17ac789870e1143711506905d9ed79525639f23ed2c7d7834688d1a9a8854c4 Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.897366 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:48 crc kubenswrapper[4911]: E0606 09:13:48.897695 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.397680656 +0000 UTC m=+40.673106199 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.900112 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h"] Jun 06 09:13:48 crc kubenswrapper[4911]: W0606 09:13:48.912859 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87f30b86_0303_493c_8919_e37e07f71709.slice/crio-d357b8e5fa6b532aea28a3ab41244cbe782dfbafb01415de5fa749f51d588070 WatchSource:0}: Error finding container d357b8e5fa6b532aea28a3ab41244cbe782dfbafb01415de5fa749f51d588070: Status 404 returned error can't find the container with id d357b8e5fa6b532aea28a3ab41244cbe782dfbafb01415de5fa749f51d588070 Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.996591 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5"] Jun 06 09:13:48 crc kubenswrapper[4911]: I0606 09:13:48.999378 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:48.999966 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.499888513 +0000 UTC m=+40.775314056 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.000731 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.001419 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.501397327 +0000 UTC m=+40.776822870 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: W0606 09:13:49.101670 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ea53c70_3632_45b8_a47f_16c6f3bc565a.slice/crio-9a5cfed502f954b5d15682f4a32bf3a120fcff549107ae60c664b5264992a621 WatchSource:0}: Error finding container 9a5cfed502f954b5d15682f4a32bf3a120fcff549107ae60c664b5264992a621: Status 404 returned error can't find the container with id 9a5cfed502f954b5d15682f4a32bf3a120fcff549107ae60c664b5264992a621 Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.106609 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.106763 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.606738735 +0000 UTC m=+40.882164278 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.106903 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.107290 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.607257297 +0000 UTC m=+40.882682840 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.139253 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d"] Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.185057 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj"] Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.210657 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.211049 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.711025849 +0000 UTC m=+40.986451382 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.213725 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t"] Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.218188 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k"] Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.312623 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.312920 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.812908699 +0000 UTC m=+41.088334242 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.411144 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-bn7hn" event={"ID":"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1","Type":"ContainerStarted","Data":"b1021e5b60f15ff8ae28d0b7e78a38094ecca1a6e97163a39ec9c1f992ae66cb"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.420733 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" event={"ID":"97427249-89c5-4e96-850e-0af8f860865a","Type":"ContainerStarted","Data":"0da4cc5afd857040c7f40cb0f87013a765f16d3df6d180220304f1d59f778069"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.422943 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" event={"ID":"e3c39a95-76c2-42b7-a8e8-e1379e1236bf","Type":"ContainerStarted","Data":"31f4a1467103e3c1a66ace341f3aebe12c9d29ea5df0865d930c8519d503c2ac"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.423612 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" event={"ID":"3ed04786-6dd4-418a-a077-534b7e26fdab","Type":"ContainerStarted","Data":"4dd113694788adecd4909cbf67daef8e02b1d68b905bb976508fff6a509fa6cb"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.428441 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.428838 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:49.928817986 +0000 UTC m=+41.204243539 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.463942 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"aacf069ca3745bb1ecf4c8a36858606e3916a27752d40ec2a4080c753880afc0"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.464049 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c8125ef86ef1605223db496f620afcd23e302ad8a36a9dc57cfaba3716f676dc"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.466202 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" event={"ID":"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515","Type":"ContainerStarted","Data":"a17ac789870e1143711506905d9ed79525639f23ed2c7d7834688d1a9a8854c4"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.475517 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-5zv54" podStartSLOduration=18.475473779 podStartE2EDuration="18.475473779s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:49.471179402 +0000 UTC m=+40.746604965" watchObservedRunningTime="2025-06-06 09:13:49.475473779 +0000 UTC m=+40.750899322" Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.476773 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"548025f4f8b80b2a03d27e37fca9faa92d5ee22e9a18df8e511b8bbf48731786"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.476990 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.478701 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-77fpp" podStartSLOduration=18.478665311 podStartE2EDuration="18.478665311s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:49.427936026 +0000 UTC m=+40.703361589" watchObservedRunningTime="2025-06-06 09:13:49.478665311 +0000 UTC m=+40.754090854" Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.482241 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" event={"ID":"9999ca00-de82-4451-bfe9-c216be6edd43","Type":"ContainerStarted","Data":"3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6"} Jun 06 09:13:49 crc 
kubenswrapper[4911]: I0606 09:13:49.482554 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.483562 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" event={"ID":"0ea53c70-3632-45b8-a47f-16c6f3bc565a","Type":"ContainerStarted","Data":"9a5cfed502f954b5d15682f4a32bf3a120fcff549107ae60c664b5264992a621"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.485957 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"f72aa5da9a5f399f3f7cda2e5a204625ba21bd9a8f4a15cb95d41737db8e01c5"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.488050 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" event={"ID":"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02","Type":"ContainerStarted","Data":"7894c3b1c4cafc7bf994d3d7955a6d96f96d6d8cdfa8c4ad3807ce9a2d7b700d"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.489011 4911 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-bsqpb container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.489086 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" podUID="9999ca00-de82-4451-bfe9-c216be6edd43" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.495134 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d" event={"ID":"acf3bc54-04c0-416f-bf4a-541244ac0074","Type":"ContainerStarted","Data":"0bfc6b8f4240a45895e7bf3d935558c27a6a7341f859916f9bf5021deac25970"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.517309 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6jl5k" event={"ID":"843168d2-cb7b-42e3-bfe9-63e012c28428","Type":"ContainerStarted","Data":"2fd52872491e3d910433eddf4cb89115d7601c66402c7cad3cab8d5e0646da8c"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.519341 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" event={"ID":"c9c6b446-7eb9-402d-b1c2-11e00895054a","Type":"ContainerStarted","Data":"c646025112d0c842b0e8cb807ef25ea7b0d14720dbc78d01adba4f7dad329d06"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.520692 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" event={"ID":"85df889f-53b1-41d3-a6fa-9a05019ef05b","Type":"ContainerStarted","Data":"dd71c2f686d105e18ec5e4db052878f779b6326af7ee9f7512185ecc530519f4"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.524214 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" event={"ID":"5502a710-ac52-4347-8c1f-095219735356","Type":"ContainerStarted","Data":"2a52ca18f789c919284dcf704aad82eafbed1356293a520479000a2afbbcf5aa"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.526465 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" event={"ID":"bb386b0e-b615-4901-b3aa-0c947a397281","Type":"ContainerStarted","Data":"ee8c477f7d6d82618eb5fc9cefeba666ec1aec5630aabcb5adbaf9d68634db6a"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.526514 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" event={"ID":"bb386b0e-b615-4901-b3aa-0c947a397281","Type":"ContainerStarted","Data":"83060a8014d07beafd201e4e1fc5d30702fcec395faa9a432952fb37bca0478c"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.527788 4911 generic.go:334] "Generic (PLEG): container finished" podID="d05fca62-157f-4111-966b-0b1bad77fc76" containerID="a5ff4309d0841ac4e5167009dc8e2b652e75c661efa20f5aa98efb877120c5d8" exitCode=0 Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.527863 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" event={"ID":"d05fca62-157f-4111-966b-0b1bad77fc76","Type":"ContainerDied","Data":"a5ff4309d0841ac4e5167009dc8e2b652e75c661efa20f5aa98efb877120c5d8"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.529515 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.531072 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:50.031050294 +0000 UTC m=+41.306475837 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.533546 4911 generic.go:334] "Generic (PLEG): container finished" podID="b72e3dc5-6506-4f38-bd35-23abe3a44764" containerID="1c98f1e455818bd56b71210143fb3f459a695a29e84c4bdc4283eaee69d12959" exitCode=0 Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.533618 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" event={"ID":"b72e3dc5-6506-4f38-bd35-23abe3a44764","Type":"ContainerDied","Data":"1c98f1e455818bd56b71210143fb3f459a695a29e84c4bdc4283eaee69d12959"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.537012 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9" event={"ID":"49294ad0-2f04-4c24-b5b5-974c2b3e5259","Type":"ContainerStarted","Data":"fccf4776e27fa38485facda3512b2669b4f4af7b4c04f4f978f919ffe0a86008"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.569897 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" event={"ID":"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1","Type":"ContainerStarted","Data":"3e73cd79c607932be4ad0ee6bfda89a2b81450e979d667d996d47c1e8180b8e0"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.577874 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-4dcwp" event={"ID":"69127e92-f707-4b41-a690-9fd917998557","Type":"ContainerStarted","Data":"ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.589766 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" event={"ID":"4ae50abd-4f8f-495f-8d6e-34f8b2b19711","Type":"ContainerStarted","Data":"548fc523b7628bb1e56761dd1527c5a2271570df886538e1584efa1cf38a2de6"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.633407 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.633718 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:50.133700911 +0000 UTC m=+41.409126454 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.650024 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" event={"ID":"22e821a6-f095-4c6e-ac9e-8484e31bd21e","Type":"ContainerStarted","Data":"798ebb3b32ac9ed373fe9a2e59d2cd9494ade5aeb58c732a068f4445d4b40fd3"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.652187 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" event={"ID":"19840c29-1e2b-499f-8a15-714083bd64a2","Type":"ContainerStarted","Data":"60993670133a69466b1ab29fea0607c33851213c6c832483adad2de8b40ce506"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.657991 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-ftvnt" event={"ID":"87f30b86-0303-493c-8919-e37e07f71709","Type":"ContainerStarted","Data":"d357b8e5fa6b532aea28a3ab41244cbe782dfbafb01415de5fa749f51d588070"} Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.692581 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.734481 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs\") pod \"network-metrics-daemon-w544n\" (UID: \"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35\") " pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.735037 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.736117 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:50.236085083 +0000 UTC m=+41.511510626 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.778810 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ede76ff2-387d-4778-b8f5-0dbbc5cf5c35-metrics-certs\") pod \"network-metrics-daemon-w544n\" (UID: \"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35\") " pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.839499 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.839856 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:50.33962672 +0000 UTC m=+41.615052273 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.840363 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.843148 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:50.343136039 +0000 UTC m=+41.618561582 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.942333 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:49 crc kubenswrapper[4911]: E0606 09:13:49.943670 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:50.443644808 +0000 UTC m=+41.719070351 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.948163 4911 scope.go:117] "RemoveContainer" containerID="1d5c13952f0ca74a326866142cf30aa66a1154139d0383ccce5632f68951a07f" Jun 06 09:13:49 crc kubenswrapper[4911]: I0606 09:13:49.974541 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-w544n" Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.044991 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:50 crc kubenswrapper[4911]: E0606 09:13:50.045928 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:50.545856246 +0000 UTC m=+41.821281789 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.115796 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-5zv54" Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.149980 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:50 crc kubenswrapper[4911]: E0606 09:13:50.150576 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:50.650558919 +0000 UTC m=+41.925984462 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.215730 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-4dcwp" podStartSLOduration=19.21569314 podStartE2EDuration="19.21569314s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:50.208738763 +0000 UTC m=+41.484164316" watchObservedRunningTime="2025-06-06 09:13:50.21569314 +0000 UTC m=+41.491118683" Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.257421 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:50 crc kubenswrapper[4911]: E0606 09:13:50.258123 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:50.758077676 +0000 UTC m=+42.033503219 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.268582 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.358562 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:50 crc kubenswrapper[4911]: E0606 09:13:50.359002 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:50.858980414 +0000 UTC m=+42.134405957 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.361570 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" podStartSLOduration=18.360153781 podStartE2EDuration="18.360153781s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:50.359631719 +0000 UTC m=+41.635057262" watchObservedRunningTime="2025-06-06 09:13:50.360153781 +0000 UTC m=+41.635579324" Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.460576 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:50 crc kubenswrapper[4911]: E0606 09:13:50.461052 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:50.961034508 +0000 UTC m=+42.236460051 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.575021 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:50 crc kubenswrapper[4911]: E0606 09:13:50.575670 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:51.075652526 +0000 UTC m=+42.351078069 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.635428 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-r56ns" podStartSLOduration=18.635406465 podStartE2EDuration="18.635406465s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:50.484394295 +0000 UTC m=+41.759819848" watchObservedRunningTime="2025-06-06 09:13:50.635406465 +0000 UTC m=+41.910832008" Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.648183 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.676695 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:50 crc kubenswrapper[4911]: E0606 09:13:50.679214 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:51.179194923 +0000 UTC m=+42.454620466 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.717496 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-64hwv"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.727675 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.740359 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-ftvnt" event={"ID":"87f30b86-0303-493c-8919-e37e07f71709","Type":"ContainerStarted","Data":"90eb3cfd62171637090381238a9de2a9ed9e82306ef42a8d1ea55156c8cee1aa"} Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.741618 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-ftvnt" Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.751153 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xj4b2"] Jun 06 09:13:50 crc kubenswrapper[4911]: W0606 09:13:50.774379 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2220da6_a793_45c1_923b_36bda543a176.slice/crio-ad9d7b7f82988550876d43b222e2ae67daa25ee451cf3453bb09c867c0dd5268 WatchSource:0}: Error finding container ad9d7b7f82988550876d43b222e2ae67daa25ee451cf3453bb09c867c0dd5268: Status 404 returned error can't find the container with id ad9d7b7f82988550876d43b222e2ae67daa25ee451cf3453bb09c867c0dd5268 Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.782641 4911 patch_prober.go:28] interesting pod/downloads-7954f5f757-ftvnt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.782728 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ftvnt" podUID="87f30b86-0303-493c-8919-e37e07f71709" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.826783 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:50 crc kubenswrapper[4911]: E0606 09:13:50.828086 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:51.328050133 +0000 UTC m=+42.603475676 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.828226 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" event={"ID":"c9c6b446-7eb9-402d-b1c2-11e00895054a","Type":"ContainerStarted","Data":"4a26dc186aa9f26664ddadf8c58298c6823cc180e38a3344384543fdc37e1412"} Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.828299 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.834619 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.839328 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbghq" podStartSLOduration=18.839277037 podStartE2EDuration="18.839277037s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:50.783285833 +0000 UTC m=+42.058711376" watchObservedRunningTime="2025-06-06 09:13:50.839277037 +0000 UTC m=+42.114702580" Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.859982 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" event={"ID":"97427249-89c5-4e96-850e-0af8f860865a","Type":"ContainerStarted","Data":"f663b8593fcb38175bef8a769c5d46953f5c94f536dcfe01c1b78a21aa48186e"} Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.863563 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5t4fd"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.901466 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.902408 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-bn7hn" event={"ID":"d6a72352-dbe3-43f1-aae4-c3a6bb160ff1","Type":"ContainerStarted","Data":"d54a6ab6beef199c8a033370fd95396ba210d3e412d55517d8434aaf667db76a"} Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.903933 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-dlswd"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.907266 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" event={"ID":"19840c29-1e2b-499f-8a15-714083bd64a2","Type":"ContainerStarted","Data":"5ba9f8341b1107b8815f7bd4122c67eabf7f23ea1d31aedacbe30f7ed8b88d77"} Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.913943 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44"] Jun 06 09:13:50 crc 
kubenswrapper[4911]: I0606 09:13:50.923401 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.923484 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.924935 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" event={"ID":"bc05a058-b682-4b99-8eef-ba7c1acf1782","Type":"ContainerStarted","Data":"056eb233de503a131ca242e0f199bfc63ac2ad5bc067644375a1957242665479"} Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.926833 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" event={"ID":"16222bda-92f4-483e-8fa8-6e701085186f","Type":"ContainerStarted","Data":"ffc41aea84ee98fa7d839bad66b00048d1186a357fb2503598f8831748a5b0cf"} Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.928175 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" event={"ID":"c695455e-b05b-4406-9471-b23af6eef3a1","Type":"ContainerStarted","Data":"7227b839225f43ff2f929fb558d60b6a8e8209d1873aa4b2b5c7329dc11a0b77"} Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.928204 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fghbg"] Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.929884 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-l9hbv" podStartSLOduration=18.929864792 podStartE2EDuration="18.929864792s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:50.923548399 +0000 UTC m=+42.198973962" watchObservedRunningTime="2025-06-06 09:13:50.929864792 +0000 UTC m=+42.205290335" Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.935076 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.943476 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:13:50 crc kubenswrapper[4911]: E0606 09:13:50.945691 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:51.445671839 +0000 UTC m=+42.721097382 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:50 crc kubenswrapper[4911]: I0606 09:13:50.972266 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-w544n"] Jun 06 09:13:50 crc kubenswrapper[4911]: W0606 09:13:50.997374 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73326677_9e72_46c9_8dad_c0bfe47d599f.slice/crio-9d430a3ef7cf24c6ea9a4b18f19eca14d018c65e5da2a074130760873322c3ef WatchSource:0}: Error finding container 9d430a3ef7cf24c6ea9a4b18f19eca14d018c65e5da2a074130760873322c3ef: Status 404 returned error can't find the container with id 9d430a3ef7cf24c6ea9a4b18f19eca14d018c65e5da2a074130760873322c3ef Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.036118 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:51 crc kubenswrapper[4911]: E0606 09:13:51.036358 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:51.536334655 +0000 UTC m=+42.811760198 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.036757 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:51 crc kubenswrapper[4911]: E0606 09:13:51.037175 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:51.537161904 +0000 UTC m=+42.812587447 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.056280 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-ftvnt" podStartSLOduration=20.056246515 podStartE2EDuration="20.056246515s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:51.022549914 +0000 UTC m=+42.297975467" watchObservedRunningTime="2025-06-06 09:13:51.056246515 +0000 UTC m=+42.331672058" Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.057276 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kwg9g" podStartSLOduration=19.057252758 podStartE2EDuration="19.057252758s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:51.055724223 +0000 UTC m=+42.331149786" watchObservedRunningTime="2025-06-06 09:13:51.057252758 +0000 UTC m=+42.332678301" Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.071693 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.071903 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.104086 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-bn7hn" podStartSLOduration=20.104058264 podStartE2EDuration="20.104058264s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:51.099054191 +0000 UTC m=+42.374479754" watchObservedRunningTime="2025-06-06 09:13:51.104058264 +0000 UTC m=+42.379483817" Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.122179 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.137952 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:51 crc kubenswrapper[4911]: E0606 09:13:51.138354 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:51.638320558 +0000 UTC m=+42.913746111 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.237182 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.238906 4911 patch_prober.go:28] interesting pod/router-default-5444994796-bn7hn container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.238975 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bn7hn" podUID="d6a72352-dbe3-43f1-aae4-c3a6bb160ff1" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.239846 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:51 crc kubenswrapper[4911]: E0606 09:13:51.240161 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:51.740150797 +0000 UTC m=+43.015576330 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.342786 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:51 crc kubenswrapper[4911]: E0606 09:13:51.343141 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:51.843109741 +0000 UTC m=+43.118535294 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.447925 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:51 crc kubenswrapper[4911]: E0606 09:13:51.448313 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:51.948300816 +0000 UTC m=+43.223726349 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.549077 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:51 crc kubenswrapper[4911]: E0606 09:13:51.549776 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:52.049760456 +0000 UTC m=+43.325185999 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.651204 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:51 crc kubenswrapper[4911]: E0606 09:13:51.651682 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:52.151662106 +0000 UTC m=+43.427087649 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.753195 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:51 crc kubenswrapper[4911]: E0606 09:13:51.753617 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:52.253597847 +0000 UTC m=+43.529023390 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.855911 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:51 crc kubenswrapper[4911]: E0606 09:13:51.856375 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:52.356358316 +0000 UTC m=+43.631783869 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.956518 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:51 crc kubenswrapper[4911]: E0606 09:13:51.956855 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:52.456838975 +0000 UTC m=+43.732264518 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:51 crc kubenswrapper[4911]: I0606 09:13:51.970469 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d" event={"ID":"acf3bc54-04c0-416f-bf4a-541244ac0074","Type":"ContainerStarted","Data":"1c42e5a883057ca511aa96e2f98e99f1b4bea726dbf01fb936b08c7f8b27b6f9"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:51.998301 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" event={"ID":"79fabc0e-2c89-4889-9a07-afa5306b20f8","Type":"ContainerStarted","Data":"569f3fde77b6c1ba782fcf9b3abf834731d794a30c9a60c2b80e9512fc9c3836"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.005655 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h4q7d" podStartSLOduration=20.005621446 podStartE2EDuration="20.005621446s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.005472072 +0000 UTC m=+43.280897635" watchObservedRunningTime="2025-06-06 09:13:52.005621446 +0000 UTC m=+43.281046999" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.025437 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" event={"ID":"f2018465-ff54-4f01-aef9-a87a2973d419","Type":"ContainerStarted","Data":"3566c1c092c5dfeca8a08c05adf9095073a937f4d43a1fcd569f2dcf65acf2c7"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.045552 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-dlswd" event={"ID":"02e23293-e9f2-4e5e-9aae-bdeb17c17823","Type":"ContainerStarted","Data":"ff8e8781c7a565549d4d9e3c4cd46475ce476fe1a81b2c92f8cc79ec8daec6de"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.058964 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.059913 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:52.559885351 +0000 UTC m=+43.835310894 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.064378 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" event={"ID":"c9c6b446-7eb9-402d-b1c2-11e00895054a","Type":"ContainerStarted","Data":"6c2b9f25e185cf3a5cfd5fda0f5c52b191ee4864eb5fe333bcbd536f074a60a8"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.104042 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" event={"ID":"b72e3dc5-6506-4f38-bd35-23abe3a44764","Type":"ContainerStarted","Data":"de3bd6cb6d8c7cc0ff2ee072df6673964085b274dd79fd5bc0258f622507f25c"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.121107 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9b2sw" podStartSLOduration=21.121065102 podStartE2EDuration="21.121065102s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.11433215 +0000 UTC m=+43.389757693" watchObservedRunningTime="2025-06-06 09:13:52.121065102 +0000 UTC m=+43.396490645" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.133876 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" event={"ID":"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515","Type":"ContainerStarted","Data":"31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.135202 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.159266 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" podStartSLOduration=7.159245624 podStartE2EDuration="7.159245624s" podCreationTimestamp="2025-06-06 09:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.15865539 +0000 UTC m=+43.434080943" watchObservedRunningTime="2025-06-06 09:13:52.159245624 +0000 UTC m=+43.434671167" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.161979 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.163048 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-06-06 09:13:52.663021319 +0000 UTC m=+43.938446862 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.191589 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" event={"ID":"b2220da6-a793-45c1-923b-36bda543a176","Type":"ContainerStarted","Data":"ad9d7b7f82988550876d43b222e2ae67daa25ee451cf3453bb09c867c0dd5268"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.192012 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.209481 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" event={"ID":"73326677-9e72-46c9-8dad-c0bfe47d599f","Type":"ContainerStarted","Data":"9d430a3ef7cf24c6ea9a4b18f19eca14d018c65e5da2a074130760873322c3ef"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.210629 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.212798 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" event={"ID":"573af29b-3e41-4b58-aec9-8bbfe7845920","Type":"ContainerStarted","Data":"cf8a3388ad1ab621d4eed7b2a88310a5aa6bf7980f0ae95eff3bb083c9605bd7"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.220351 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" event={"ID":"31704392-a75e-4c29-b905-778ae0b34fb6","Type":"ContainerStarted","Data":"ca7b32eb4d37ba6fae7afefc6565c6122175db2ffbcc217939a14b27e50e2e3b"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.227324 4911 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-8tqzh container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.227423 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" podUID="b2220da6-a793-45c1-923b-36bda543a176" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.229036 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" podStartSLOduration=20.228976288 podStartE2EDuration="20.228976288s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-06-06 09:13:52.225405257 +0000 UTC m=+43.500830820" watchObservedRunningTime="2025-06-06 09:13:52.228976288 +0000 UTC m=+43.504401841" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.234466 4911 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-r4hl5 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused" start-of-body= Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.234539 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" podUID="73326677-9e72-46c9-8dad-c0bfe47d599f" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.242366 4911 patch_prober.go:28] interesting pod/router-default-5444994796-bn7hn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jun 06 09:13:52 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld Jun 06 09:13:52 crc kubenswrapper[4911]: [+]process-running ok Jun 06 09:13:52 crc kubenswrapper[4911]: healthz check failed Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.242504 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bn7hn" podUID="d6a72352-dbe3-43f1-aae4-c3a6bb160ff1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.264004 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" podStartSLOduration=20.263978518 podStartE2EDuration="20.263978518s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.261709417 +0000 UTC m=+43.537134990" watchObservedRunningTime="2025-06-06 09:13:52.263978518 +0000 UTC m=+43.539404091" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.264252 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.265536 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:52.765516583 +0000 UTC m=+44.040942126 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.272793 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" event={"ID":"5852e56b-1121-4a77-a104-225400bdf58b","Type":"ContainerStarted","Data":"56d71e6a44a713b70178b773baf85eb06e2d342bcd1c4e308c3fb1603be313a6"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.298409 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-w544n" event={"ID":"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35","Type":"ContainerStarted","Data":"533c603a435781d686722916592d17467793f3ec3d46fc7ef96bbd3a6267a01f"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.303226 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" event={"ID":"16222bda-92f4-483e-8fa8-6e701085186f","Type":"ContainerStarted","Data":"4402977aefbc7caafbeacd8dc38d95c99918e76cb91c27e4b06f3196f1a695f8"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.317419 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.317583 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xj4b2" event={"ID":"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614","Type":"ContainerStarted","Data":"4f579af337987f443dffacad4b5ed0a4c004ba7038fbf4c89cc06a40b0102636"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.321730 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" event={"ID":"9f95470f-d456-468e-83a7-e72420631662","Type":"ContainerStarted","Data":"65d5b509732b2d2b1299bd91ac6dd5c93b5dbc86f61d9f983fb8c67530cbc9d7"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.323147 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" event={"ID":"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1","Type":"ContainerStarted","Data":"79a7304f201f3a5673c544cbef5b4c76ac443be2aa981fc5401b0d2b1b021534"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.324296 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" event={"ID":"85df889f-53b1-41d3-a6fa-9a05019ef05b","Type":"ContainerStarted","Data":"18823c785e577f69b169737059ce199645e1a6fde2b1e0fad16d126e2dfd8c2a"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.328434 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" event={"ID":"0ea53c70-3632-45b8-a47f-16c6f3bc565a","Type":"ContainerStarted","Data":"29db421c426a83124ecbf39e05b4298a8de678c4b7afd6aee00e2c7eac08be22"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.350249 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bt8wp" podStartSLOduration=21.350225565 podStartE2EDuration="21.350225565s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.349600831 +0000 UTC m=+43.625026374" watchObservedRunningTime="2025-06-06 09:13:52.350225565 +0000 UTC m=+43.625651108" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.352562 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" event={"ID":"b1afd53c-ecea-46b7-ae7f-9313ab7b9a02","Type":"ContainerStarted","Data":"8c4382304bfdf431f6fbf991680e1b2912903f15fa94229f158f620461718260"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.368264 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.368492 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:52.868456107 +0000 UTC m=+44.143881670 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.368741 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.369127 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:52.869110141 +0000 UTC m=+44.144535684 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.370450 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" event={"ID":"2654cb62-4a89-4c50-ab33-27c9273b7e82","Type":"ContainerStarted","Data":"d7e08ae9892ff2100238000c43bfbc5cbee83b705f071a1dfa791d3fa3e98f50"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.384036 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xtd6h" podStartSLOduration=20.384006078 podStartE2EDuration="20.384006078s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.374086224 +0000 UTC m=+43.649511787" watchObservedRunningTime="2025-06-06 09:13:52.384006078 +0000 UTC m=+43.659431621" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.399535 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9" event={"ID":"49294ad0-2f04-4c24-b5b5-974c2b3e5259","Type":"ContainerStarted","Data":"0fe0762c6bd2cfe7d70f66e860471204695d8ab54f0e48743f5122da719879df"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.439750 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" podStartSLOduration=21.439717685 podStartE2EDuration="21.439717685s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.410785232 +0000 UTC m=+43.686210795" watchObservedRunningTime="2025-06-06 09:13:52.439717685 +0000 UTC m=+43.715143228" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.447033 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6jl5k" event={"ID":"843168d2-cb7b-42e3-bfe9-63e012c28428","Type":"ContainerStarted","Data":"269c48e822cd6bd4aae64314678684c3f35e9dd6fcf6d852ad4232b9783b4564"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.470542 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.471248 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:52.971218666 +0000 UTC m=+44.246644209 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.475979 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.483368 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:52.9833487 +0000 UTC m=+44.258774233 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.485223 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" event={"ID":"c36e9c0e-d7bb-4307-8767-c34651aeb7a8","Type":"ContainerStarted","Data":"9c4a8bf775467270a658e0d090270788dd8d491291cea4b29ce48b234c2661cb"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.485267 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" event={"ID":"c36e9c0e-d7bb-4307-8767-c34651aeb7a8","Type":"ContainerStarted","Data":"2bed004dff9831e699e3498b8d26f7fe8738aab3332d634385bbc28ea1857bc7"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.519059 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-wdk2h" podStartSLOduration=21.519033456 podStartE2EDuration="21.519033456s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.506244837 +0000 UTC m=+43.781670400" watchObservedRunningTime="2025-06-06 09:13:52.519033456 +0000 UTC m=+43.794458999" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.562832 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" event={"ID":"4ae50abd-4f8f-495f-8d6e-34f8b2b19711","Type":"ContainerStarted","Data":"fe27d21344612727a47f9002a7c9feb7c7d7b984a8b2ab914531950dc2f8510e"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.564945 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:13:52 crc 
kubenswrapper[4911]: I0606 09:13:52.577057 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.578251 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:53.078234182 +0000 UTC m=+44.353659715 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.583575 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6jl5k" podStartSLOduration=7.583554362 podStartE2EDuration="7.583554362s" podCreationTimestamp="2025-06-06 09:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.575402288 +0000 UTC m=+43.850827831" watchObservedRunningTime="2025-06-06 09:13:52.583554362 +0000 UTC m=+43.858979905" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.593635 4911 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-z2z69 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.15:6443/healthz\": dial tcp 10.217.0.15:6443: connect: connection refused" start-of-body= Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.593690 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" podUID="4ae50abd-4f8f-495f-8d6e-34f8b2b19711" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.15:6443/healthz\": dial tcp 10.217.0.15:6443: connect: connection refused" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.633085 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" podStartSLOduration=20.6330642 podStartE2EDuration="20.6330642s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.632735353 +0000 UTC m=+43.908160906" watchObservedRunningTime="2025-06-06 09:13:52.6330642 +0000 UTC m=+43.908489743" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.657491 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" event={"ID":"5502a710-ac52-4347-8c1f-095219735356","Type":"ContainerStarted","Data":"b4ffad0814cf4ffc65e17c2737c8593614b02593ca950a8428bcaad26488c47d"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.678593 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.678927 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:53.178911055 +0000 UTC m=+44.454336598 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.709460 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" event={"ID":"e3c39a95-76c2-42b7-a8e8-e1379e1236bf","Type":"ContainerStarted","Data":"478bb230f71088194bf576c895f5c940913583189ee216167d42b1d79888978b"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.765022 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" podStartSLOduration=21.765000619 podStartE2EDuration="21.765000619s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.713636589 +0000 UTC m=+43.989062132" watchObservedRunningTime="2025-06-06 09:13:52.765000619 +0000 UTC m=+44.040426162" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.766907 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-fb9n5" podStartSLOduration=21.766895271 podStartE2EDuration="21.766895271s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.766648376 +0000 UTC m=+44.042073929" watchObservedRunningTime="2025-06-06 09:13:52.766895271 +0000 UTC m=+44.042320814" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.779212 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.779840 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.781182 4911 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:53.281163693 +0000 UTC m=+44.556589236 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.838812 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"63154a289bcb0af4b184f1bbb90e96282f47ee1ad34a54f42d20c41253dc5fbd"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.840370 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.843195 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" event={"ID":"bc05a058-b682-4b99-8eef-ba7c1acf1782","Type":"ContainerStarted","Data":"a3f931e24566b096bcb9dfba79b66db8e082a21c18147898b98bb537bb090595"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.862639 4911 generic.go:334] "Generic (PLEG): container finished" podID="3ed04786-6dd4-418a-a077-534b7e26fdab" containerID="14cea525e51d33c11353ea6332fa7c5989a98ed43c605efb8f16afeaa00d01c0" exitCode=0 Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.863691 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" event={"ID":"3ed04786-6dd4-418a-a077-534b7e26fdab","Type":"ContainerDied","Data":"14cea525e51d33c11353ea6332fa7c5989a98ed43c605efb8f16afeaa00d01c0"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.879356 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" event={"ID":"36152524-008c-4ae4-9b35-70aceacddd34","Type":"ContainerStarted","Data":"fe2e05f0d1ce5014f1597570a0717eeb0ff35998af7b1db5c81c72298a346376"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.888041 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.888798 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:53.388765572 +0000 UTC m=+44.664191275 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.894705 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=20.894684406 podStartE2EDuration="20.894684406s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.893008028 +0000 UTC m=+44.168433581" watchObservedRunningTime="2025-06-06 09:13:52.894684406 +0000 UTC m=+44.170109949" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.903720 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" event={"ID":"c695455e-b05b-4406-9471-b23af6eef3a1","Type":"ContainerStarted","Data":"3e2c9bfb0eaf3083e3c9a38d163700ebbc7ab96254cb4e7144d84481186d2ede"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.911614 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" event={"ID":"d05fca62-157f-4111-966b-0b1bad77fc76","Type":"ContainerStarted","Data":"301203010a07d9eb3eaeb144d3b1e726b5543212a4da38aa5ad16df9c974330d"} Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.914723 4911 patch_prober.go:28] interesting pod/downloads-7954f5f757-ftvnt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.914775 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ftvnt" podUID="87f30b86-0303-493c-8919-e37e07f71709" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.967294 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cxj7k" podStartSLOduration=20.967278225 podStartE2EDuration="20.967278225s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.966058127 +0000 UTC m=+44.241483670" watchObservedRunningTime="2025-06-06 09:13:52.967278225 +0000 UTC m=+44.242703768" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.985508 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" podStartSLOduration=20.985488106 podStartE2EDuration="20.985488106s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:52.983503901 
+0000 UTC m=+44.258929444" watchObservedRunningTime="2025-06-06 09:13:52.985488106 +0000 UTC m=+44.260913669" Jun 06 09:13:52 crc kubenswrapper[4911]: I0606 09:13:52.990174 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:52 crc kubenswrapper[4911]: E0606 09:13:52.991192 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:53.491153624 +0000 UTC m=+44.766579167 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.016435 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" podStartSLOduration=21.016413714 podStartE2EDuration="21.016413714s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:53.016369833 +0000 UTC m=+44.291795396" watchObservedRunningTime="2025-06-06 09:13:53.016413714 +0000 UTC m=+44.291839257" Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.042882 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c2x4t" podStartSLOduration=22.042844831 podStartE2EDuration="22.042844831s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:53.041255295 +0000 UTC m=+44.316680838" watchObservedRunningTime="2025-06-06 09:13:53.042844831 +0000 UTC m=+44.318270374" Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.091614 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.092205 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:53.592181674 +0000 UTC m=+44.867607217 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.199909 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.200446 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:53.700424588 +0000 UTC m=+44.975850131 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.253693 4911 patch_prober.go:28] interesting pod/router-default-5444994796-bn7hn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jun 06 09:13:53 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld Jun 06 09:13:53 crc kubenswrapper[4911]: [+]process-running ok Jun 06 09:13:53 crc kubenswrapper[4911]: healthz check failed Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.253756 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bn7hn" podUID="d6a72352-dbe3-43f1-aae4-c3a6bb160ff1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.302005 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.302557 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:53.802534213 +0000 UTC m=+45.077959756 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.404228 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.404514 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:53.904467554 +0000 UTC m=+45.179893097 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.404594 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.405044 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:53.905035867 +0000 UTC m=+45.180461400 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.436869 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-8pgq8"] Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.505604 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.505878 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.005834773 +0000 UTC m=+45.281260316 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.506130 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.506590 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.006579809 +0000 UTC m=+45.282005352 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.606909 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.607484 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.107462197 +0000 UTC m=+45.382887740 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.607828 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.608143 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.108136522 +0000 UTC m=+45.383562065 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.709641 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.709911 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.209869609 +0000 UTC m=+45.485295192 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.710039 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.710394 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.21038612 +0000 UTC m=+45.485811663 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.810969 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.811166 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.311141835 +0000 UTC m=+45.586567388 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.811661 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.812210 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.312185958 +0000 UTC m=+45.587611501 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.912900 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.913327 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.413281381 +0000 UTC m=+45.688706934 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.913412 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:53 crc kubenswrapper[4911]: E0606 09:13:53.913931 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.413919895 +0000 UTC m=+45.689345438 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.921488 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" event={"ID":"573af29b-3e41-4b58-aec9-8bbfe7845920","Type":"ContainerStarted","Data":"a5160a7dd73b764858005aa3892acaa300753976b3bf4515b7ed373e86379dfb"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.921809 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.923494 4911 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-64hwv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body=
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.923556 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" podUID="573af29b-3e41-4b58-aec9-8bbfe7845920" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.924938 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" event={"ID":"31704392-a75e-4c29-b905-778ae0b34fb6","Type":"ContainerStarted","Data":"77a1d78836be4d96540db1517130c2ed647d4e0af153adda510389311c951b28"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.927502 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" event={"ID":"9f95470f-d456-468e-83a7-e72420631662","Type":"ContainerStarted","Data":"7645eb8d1ce6731515d0ecccac3a5966e7c0df93844c7423d82f26c87f192b53"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.927550 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" event={"ID":"9f95470f-d456-468e-83a7-e72420631662","Type":"ContainerStarted","Data":"c9fe2492c61803a589a1ced5bf3065ec65390ee20342050c6926b42fd9bf22a3"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.927687 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.930490 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" event={"ID":"cea8e043-ab75-4ad6-aa10-8210fb5ee7a1","Type":"ContainerStarted","Data":"5381979bfeb0b8693b493b94e5cc99d92b2c54768fa7365616efb9e886a590be"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.933646 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s9cdv" event={"ID":"36152524-008c-4ae4-9b35-70aceacddd34","Type":"ContainerStarted","Data":"4bff0acc0ca02b9218a1c2da8f0cb733db46941eff11b85b11205930a7a7c506"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.936220 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-46t9l" event={"ID":"85df889f-53b1-41d3-a6fa-9a05019ef05b","Type":"ContainerStarted","Data":"81091973a03f1a7ed34ee1c20c7d9866385e2db236c909e90f7ac304645996f9"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.938619 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" event={"ID":"b2220da6-a793-45c1-923b-36bda543a176","Type":"ContainerStarted","Data":"02a884d9ca447ff5dc237ae82938b4805b92c07d13826dae5a032359bd40e7ef"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.940064 4911 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-8tqzh container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body=
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.940159 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh" podUID="b2220da6-a793-45c1-923b-36bda543a176" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.941526 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-dlswd" event={"ID":"02e23293-e9f2-4e5e-9aae-bdeb17c17823","Type":"ContainerStarted","Data":"a5b16babcf865e1070187e56964f71b520b12956b2cdc021d110358692bb1f16"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.944865 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-w544n" event={"ID":"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35","Type":"ContainerStarted","Data":"3f106c0a7d4672a4321af4a4aacecd8958f0b34103c9c51bdb219f6b9cf9ee7a"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.944922 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-w544n" event={"ID":"ede76ff2-387d-4778-b8f5-0dbbc5cf5c35","Type":"ContainerStarted","Data":"0426aa8c76f64d0a5b48282d6686157e694b1d164ed60fcf5b4ee5bbe2c68286"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.945992 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" podStartSLOduration=21.945965718 podStartE2EDuration="21.945965718s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:53.945281513 +0000 UTC m=+45.220707076" watchObservedRunningTime="2025-06-06 09:13:53.945965718 +0000 UTC m=+45.221391261"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.957836 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9" event={"ID":"49294ad0-2f04-4c24-b5b5-974c2b3e5259","Type":"ContainerStarted","Data":"09773b3e0f2029d821d1d13dde6398a53bd961b8acc2e364f25b362ec5f4176d"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.957921 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" event={"ID":"b72e3dc5-6506-4f38-bd35-23abe3a44764","Type":"ContainerStarted","Data":"e2eaddbd066ae32736931c4befcb3cde0e4d8e269c4c0736aae6497e9371d914"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.957944 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" event={"ID":"5852e56b-1121-4a77-a104-225400bdf58b","Type":"ContainerStarted","Data":"739a1ff1960e5a9638d2ef0f2d897b0410144605fbefc50777de91effdb9e340"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.958347 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.960249 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" event={"ID":"79fabc0e-2c89-4889-9a07-afa5306b20f8","Type":"ContainerStarted","Data":"16ed46798a9c5316b149dd29cce6f99d1ef2ae504d5029c438dc081330e8cb07"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.960307 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" event={"ID":"79fabc0e-2c89-4889-9a07-afa5306b20f8","Type":"ContainerStarted","Data":"4d8c68b43d7fdc7c414f667e777d97c947453ed55dddfa5a4825813747e820e5"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.961077 4911 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-99c44 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body=
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.961152 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" podUID="5852e56b-1121-4a77-a104-225400bdf58b" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.962261 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xj4b2" event={"ID":"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614","Type":"ContainerStarted","Data":"c5fa686d398dc40e6307dc41fc3b1de96b8d21fd1345aa2a9d8cd662784bf261"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.962293 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xj4b2" event={"ID":"53b9bdc5-cb4e-4ee1-be2f-d58d0c554614","Type":"ContainerStarted","Data":"790b9ecc9f1909134a91b77b94a53a79d5469fb6e368f846e6f2149d2413b914"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.962695 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-xj4b2"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.963976 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" event={"ID":"e3c39a95-76c2-42b7-a8e8-e1379e1236bf","Type":"ContainerStarted","Data":"a1aa254587d947e60da5eb37deb86ce62cadecc6c949227f4ca40ed6171d7bef"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.966393 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" event={"ID":"2654cb62-4a89-4c50-ab33-27c9273b7e82","Type":"ContainerStarted","Data":"ddc4dce762c58697d3626283685b4e4bb030ac2624bdd1c26bc553542361dfbc"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.969756 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" event={"ID":"73326677-9e72-46c9-8dad-c0bfe47d599f","Type":"ContainerStarted","Data":"9a36977eadf19c10b7f563019711ddd8daf1ce81d44bd1cb42abe5a5d176ccdb"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.970588 4911 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-r4hl5 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused" start-of-body=
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.970684 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5" podUID="73326677-9e72-46c9-8dad-c0bfe47d599f" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.974423 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" event={"ID":"3ed04786-6dd4-418a-a077-534b7e26fdab","Type":"ContainerStarted","Data":"e147b3d8b2b32b4647200c1b534adb4513b2f5b4fd241bc7971f16d69a9e812f"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.974566 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.977319 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" event={"ID":"f2018465-ff54-4f01-aef9-a87a2973d419","Type":"ContainerStarted","Data":"47d9e76943a75d9beab835805a08e29d42ef3ed004c16a72cf72ed5982fb06a9"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.977384 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" event={"ID":"f2018465-ff54-4f01-aef9-a87a2973d419","Type":"ContainerStarted","Data":"9c4436b8372f9592441eb79f206045e17e64c91b2202f329e189b799dd7099df"}
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.978630 4911 patch_prober.go:28] interesting pod/downloads-7954f5f757-ftvnt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body=
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.978717 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ftvnt" podUID="87f30b86-0303-493c-8919-e37e07f71709" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.978733 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsrd5" podStartSLOduration=21.978701207 podStartE2EDuration="21.978701207s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:53.975602567 +0000 UTC m=+45.251028120" watchObservedRunningTime="2025-06-06 09:13:53.978701207 +0000 UTC m=+45.254126750"
Jun 06 09:13:53 crc kubenswrapper[4911]: I0606 09:13:53.992797 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69"
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.008396 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-dlswd" podStartSLOduration=9.008365087 podStartE2EDuration="9.008365087s" podCreationTimestamp="2025-06-06 09:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.005735628 +0000 UTC m=+45.281161171" watchObservedRunningTime="2025-06-06 09:13:54.008365087 +0000 UTC m=+45.283790630"
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.015796 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.016108 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.516053211 +0000 UTC m=+45.791478764 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.016399 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh"
Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.021292 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.521266978 +0000 UTC m=+45.796692731 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.036349 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-w544n" podStartSLOduration=23.036324468 podStartE2EDuration="23.036324468s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.036319438 +0000 UTC m=+45.311744981" watchObservedRunningTime="2025-06-06 09:13:54.036324468 +0000 UTC m=+45.311750011" Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.064688 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" podStartSLOduration=22.064660098 podStartE2EDuration="22.064660098s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.058577381 +0000 UTC m=+45.334002934" watchObservedRunningTime="2025-06-06 09:13:54.064660098 +0000 UTC m=+45.340085641" Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.107146 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-fghbg" podStartSLOduration=23.107127847 podStartE2EDuration="23.107127847s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.097467868 +0000 UTC m=+45.372893421" watchObservedRunningTime="2025-06-06 09:13:54.107127847 +0000 UTC m=+45.382553390" Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.118453 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.119269 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-xj4b2" podStartSLOduration=9.11923823 podStartE2EDuration="9.11923823s" podCreationTimestamp="2025-06-06 09:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.118310249 +0000 UTC m=+45.393735802" watchObservedRunningTime="2025-06-06 09:13:54.11923823 +0000 UTC m=+45.394663773" Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.120649 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-06-06 09:13:54.620622691 +0000 UTC m=+45.896048234 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.183679 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fp5lv" podStartSLOduration=22.183656634 podStartE2EDuration="22.183656634s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.182661072 +0000 UTC m=+45.458086625" watchObservedRunningTime="2025-06-06 09:13:54.183656634 +0000 UTC m=+45.459082177" Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.224449 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.224913 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.724892525 +0000 UTC m=+46.000318068 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.236447 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" podStartSLOduration=23.236417765 podStartE2EDuration="23.236417765s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.234117123 +0000 UTC m=+45.509542686" watchObservedRunningTime="2025-06-06 09:13:54.236417765 +0000 UTC m=+45.511843308"
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.242347 4911 patch_prober.go:28] interesting pod/router-default-5444994796-bn7hn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jun 06 09:13:54 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld
Jun 06 09:13:54 crc kubenswrapper[4911]: [+]process-running ok
Jun 06 09:13:54 crc kubenswrapper[4911]: healthz check failed
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.242447 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bn7hn" podUID="d6a72352-dbe3-43f1-aae4-c3a6bb160ff1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.269207 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jtrxq" podStartSLOduration=23.269179305 podStartE2EDuration="23.269179305s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.266512035 +0000 UTC m=+45.541937578" watchObservedRunningTime="2025-06-06 09:13:54.269179305 +0000 UTC m=+45.544604848"
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.325674 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.326716 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:54.826691853 +0000 UTC m=+46.102117396 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.344574 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" podStartSLOduration=23.344552306 podStartE2EDuration="23.344552306s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.316386511 +0000 UTC m=+45.591812234" watchObservedRunningTime="2025-06-06 09:13:54.344552306 +0000 UTC m=+45.619977849" Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.371564 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44" podStartSLOduration=22.371536646 podStartE2EDuration="22.371536646s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.370324528 +0000 UTC m=+45.645750071" watchObservedRunningTime="2025-06-06 09:13:54.371536646 +0000 UTC m=+45.646962189" Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.371787 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-pld6r" podStartSLOduration=22.371780521 podStartE2EDuration="22.371780521s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.341839075 +0000 UTC m=+45.617264618" watchObservedRunningTime="2025-06-06 09:13:54.371780521 +0000 UTC m=+45.647206074" Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.393958 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mptr9" podStartSLOduration=22.393931991 podStartE2EDuration="22.393931991s" podCreationTimestamp="2025-06-06 09:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:54.389300487 +0000 UTC m=+45.664726050" watchObservedRunningTime="2025-06-06 09:13:54.393931991 +0000 UTC m=+45.669357534" Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.428020 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.428423 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-06-06 09:13:54.928409529 +0000 UTC m=+46.203835072 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.529612 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.529823 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.029792518 +0000 UTC m=+46.305218061 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.530032 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.530548 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.030524715 +0000 UTC m=+46.305950258 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.631615 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.631804 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.13175981 +0000 UTC m=+46.407185353 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.632023 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.632418 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.132407365 +0000 UTC m=+46.407832908 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.733609 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.733794 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.233766633 +0000 UTC m=+46.509192176 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.734013 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.734377 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.234368626 +0000 UTC m=+46.509794169 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.835179 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.835433 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.335388607 +0000 UTC m=+46.610814160 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.835549 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.835986 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.33597857 +0000 UTC m=+46.611404123 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.937200 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jun 06 09:13:54 crc kubenswrapper[4911]: E0606 09:13:54.937589 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.437568784 +0000 UTC m=+46.712994327 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.985569 4911 generic.go:334] "Generic (PLEG): container finished" podID="c36e9c0e-d7bb-4307-8767-c34651aeb7a8" containerID="9c4a8bf775467270a658e0d090270788dd8d491291cea4b29ce48b234c2661cb" exitCode=0
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.985766 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" event={"ID":"c36e9c0e-d7bb-4307-8767-c34651aeb7a8","Type":"ContainerDied","Data":"9c4a8bf775467270a658e0d090270788dd8d491291cea4b29ce48b234c2661cb"}
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.989044 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" podUID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" containerName="kube-multus-additional-cni-plugins" containerID="cri-o://31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" gracePeriod=30
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.993355 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv"
Jun 06 09:13:54 crc kubenswrapper[4911]: I0606 09:13:54.993734 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8tqzh"
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.002928 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-99c44"
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.010652 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r4hl5"
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.041782 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh"
Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.042727 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.542703367 +0000 UTC m=+46.818128910 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.148950 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.149314 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.649256352 +0000 UTC m=+46.924681905 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.149518 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh"
Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.149985 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.649966298 +0000 UTC m=+46.925391851 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.244007 4911 patch_prober.go:28] interesting pod/router-default-5444994796-bn7hn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jun 06 09:13:55 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld
Jun 06 09:13:55 crc kubenswrapper[4911]: [+]process-running ok
Jun 06 09:13:55 crc kubenswrapper[4911]: healthz check failed
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.244080 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bn7hn" podUID="d6a72352-dbe3-43f1-aae4-c3a6bb160ff1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.252237 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.252661 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.752636966 +0000 UTC m=+47.028062509 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.354572 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh"
Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.355224 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.85519472 +0000 UTC m=+47.130620263 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.456590 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.456926 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.956881256 +0000 UTC m=+47.232306809 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.457068 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.457600 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:55.957589692 +0000 UTC m=+47.233015235 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.474534 4911 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.559510 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.559771 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.059728548 +0000 UTC m=+47.335154091 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.560362 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh"
Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.560823 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.060803482 +0000 UTC m=+47.336229025 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.663496 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.663721 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.163685544 +0000 UTC m=+47.439111097 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.663775 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.664212 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.164201156 +0000 UTC m=+47.439626699 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.764879 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.765199 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.265156485 +0000 UTC m=+47.540582028 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.765329 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.765673 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.265655386 +0000 UTC m=+47.541080939 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.866799 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.867223 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.367206039 +0000 UTC m=+47.642631582 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.903988 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xw8cr"] Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.905065 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.909075 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.931030 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xw8cr"] Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.968484 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-utilities\") pod \"certified-operators-xw8cr\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.968545 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmz58\" (UniqueName: \"kubernetes.io/projected/c994037f-6a9e-4e7e-82ee-e390d62354e0-kube-api-access-bmz58\") pod \"certified-operators-xw8cr\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.968594 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-catalog-content\") pod \"certified-operators-xw8cr\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.968633 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:55 crc kubenswrapper[4911]: E0606 09:13:55.969032 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.469014727 +0000 UTC m=+47.744440270 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.996024 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" event={"ID":"2654cb62-4a89-4c50-ab33-27c9273b7e82","Type":"ContainerStarted","Data":"f15dd9752726995272a947ca02fda2d2fcda9a441661e0b1306e443d8b5ff882"} Jun 06 09:13:55 crc kubenswrapper[4911]: I0606 09:13:55.996160 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" event={"ID":"2654cb62-4a89-4c50-ab33-27c9273b7e82","Type":"ContainerStarted","Data":"248c78187592c70d478092bde1c146fb77e12377c7c37e51fa3d3ed3d638f685"} Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.070694 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:56 crc kubenswrapper[4911]: E0606 09:13:56.070961 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.570926408 +0000 UTC m=+47.846351951 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.071350 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-catalog-content\") pod \"certified-operators-xw8cr\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.071429 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.071825 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-utilities\") pod \"certified-operators-xw8cr\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.071902 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmz58\" (UniqueName: \"kubernetes.io/projected/c994037f-6a9e-4e7e-82ee-e390d62354e0-kube-api-access-bmz58\") pod \"certified-operators-xw8cr\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.071991 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-catalog-content\") pod \"certified-operators-xw8cr\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:13:56 crc kubenswrapper[4911]: E0606 09:13:56.073154 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.573143188 +0000 UTC m=+47.848568731 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.074056 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-utilities\") pod \"certified-operators-xw8cr\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.097449 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmz58\" (UniqueName: \"kubernetes.io/projected/c994037f-6a9e-4e7e-82ee-e390d62354e0-kube-api-access-bmz58\") pod \"certified-operators-xw8cr\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.110840 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hb7s6"] Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.112639 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.116572 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.131472 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hb7s6"] Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.172866 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:56 crc kubenswrapper[4911]: E0606 09:13:56.173329 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.673266448 +0000 UTC m=+47.948692001 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.215084 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.226716 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.238603 4911 patch_prober.go:28] interesting pod/router-default-5444994796-bn7hn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jun 06 09:13:56 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld Jun 06 09:13:56 crc kubenswrapper[4911]: [+]process-running ok Jun 06 09:13:56 crc kubenswrapper[4911]: healthz check failed Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.238700 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bn7hn" podUID="d6a72352-dbe3-43f1-aae4-c3a6bb160ff1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.275294 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.275361 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-catalog-content\") pod \"community-operators-hb7s6\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.275438 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29zrl\" (UniqueName: \"kubernetes.io/projected/4d92a6a8-bc23-4b3e-959d-75741ac051f3-kube-api-access-29zrl\") pod \"community-operators-hb7s6\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.275489 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-utilities\") pod \"community-operators-hb7s6\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:13:56 crc kubenswrapper[4911]: E0606 09:13:56.275836 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-06-06 09:13:56.775821243 +0000 UTC m=+48.051246786 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8zkfh" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.291706 4911 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-06-06T09:13:55.474579235Z","Handler":null,"Name":""} Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.308387 4911 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.308438 4911 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.308529 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ms5n2"] Jun 06 09:13:56 crc kubenswrapper[4911]: E0606 09:13:56.308790 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c36e9c0e-d7bb-4307-8767-c34651aeb7a8" containerName="collect-profiles" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.308817 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c36e9c0e-d7bb-4307-8767-c34651aeb7a8" containerName="collect-profiles" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.308932 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c36e9c0e-d7bb-4307-8767-c34651aeb7a8" containerName="collect-profiles" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.310179 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.332040 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ms5n2"] Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.376641 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.376719 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-secret-volume\") pod \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.376765 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vb4tz\" (UniqueName: \"kubernetes.io/projected/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-kube-api-access-vb4tz\") pod \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.376873 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-config-volume\") pod \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\" (UID: \"c36e9c0e-d7bb-4307-8767-c34651aeb7a8\") " Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.377462 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-utilities\") pod \"community-operators-hb7s6\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.377510 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-utilities\") pod \"certified-operators-ms5n2\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.377568 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-catalog-content\") pod \"community-operators-hb7s6\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.377639 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqgf9\" (UniqueName: \"kubernetes.io/projected/c114b448-dfd7-45ec-8243-62fbdb0a8257-kube-api-access-lqgf9\") pod \"certified-operators-ms5n2\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.377674 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-catalog-content\") pod \"certified-operators-ms5n2\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.377707 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29zrl\" (UniqueName: \"kubernetes.io/projected/4d92a6a8-bc23-4b3e-959d-75741ac051f3-kube-api-access-29zrl\") pod \"community-operators-hb7s6\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.377972 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-utilities\") pod \"community-operators-hb7s6\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.377989 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-config-volume" (OuterVolumeSpecName: "config-volume") pod "c36e9c0e-d7bb-4307-8767-c34651aeb7a8" (UID: "c36e9c0e-d7bb-4307-8767-c34651aeb7a8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.378253 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-catalog-content\") pod \"community-operators-hb7s6\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.400037 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29zrl\" (UniqueName: \"kubernetes.io/projected/4d92a6a8-bc23-4b3e-959d-75741ac051f3-kube-api-access-29zrl\") pod \"community-operators-hb7s6\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.415248 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c36e9c0e-d7bb-4307-8767-c34651aeb7a8" (UID: "c36e9c0e-d7bb-4307-8767-c34651aeb7a8"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.415633 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-kube-api-access-vb4tz" (OuterVolumeSpecName: "kube-api-access-vb4tz") pod "c36e9c0e-d7bb-4307-8767-c34651aeb7a8" (UID: "c36e9c0e-d7bb-4307-8767-c34651aeb7a8"). InnerVolumeSpecName "kube-api-access-vb4tz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.419482 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.437189 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.479888 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-utilities\") pod \"certified-operators-ms5n2\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.480433 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.480517 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqgf9\" (UniqueName: \"kubernetes.io/projected/c114b448-dfd7-45ec-8243-62fbdb0a8257-kube-api-access-lqgf9\") pod \"certified-operators-ms5n2\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.480550 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-catalog-content\") pod \"certified-operators-ms5n2\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.480625 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-config-volume\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.480638 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-secret-volume\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.480648 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vb4tz\" (UniqueName: \"kubernetes.io/projected/c36e9c0e-d7bb-4307-8767-c34651aeb7a8-kube-api-access-vb4tz\") on node \"crc\" DevicePath \"\"" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.480823 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-utilities\") pod \"certified-operators-ms5n2\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.481146 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-catalog-content\") pod \"certified-operators-ms5n2\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:13:56 crc 
kubenswrapper[4911]: I0606 09:13:56.485379 4911 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.485420 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.508889 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqgf9\" (UniqueName: \"kubernetes.io/projected/c114b448-dfd7-45ec-8243-62fbdb0a8257-kube-api-access-lqgf9\") pod \"certified-operators-ms5n2\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.508927 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rm98f"] Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.511352 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.524240 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rm98f"] Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.529430 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8zkfh\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.557466 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xw8cr"] Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.573247 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.574611 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.579403 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.579548 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.581396 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-catalog-content\") pod \"community-operators-rm98f\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.581479 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-utilities\") pod \"community-operators-rm98f\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.581513 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7kb8\" (UniqueName: \"kubernetes.io/projected/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-kube-api-access-p7kb8\") pod \"community-operators-rm98f\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.591427 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.640004 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.683468 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-utilities\") pod \"community-operators-rm98f\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.683535 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7kb8\" (UniqueName: \"kubernetes.io/projected/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-kube-api-access-p7kb8\") pod \"community-operators-rm98f\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.683581 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-catalog-content\") pod \"community-operators-rm98f\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.683631 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fead550e-49c3-4e4e-b1c1-0dc5abb36c98\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.683683 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"fead550e-49c3-4e4e-b1c1-0dc5abb36c98\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.684517 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-catalog-content\") pod \"community-operators-rm98f\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.687008 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-utilities\") pod \"community-operators-rm98f\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.709901 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7kb8\" (UniqueName: \"kubernetes.io/projected/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-kube-api-access-p7kb8\") pod \"community-operators-rm98f\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.737409 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hb7s6"] Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.786465 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.786907 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fead550e-49c3-4e4e-b1c1-0dc5abb36c98\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.786857 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fead550e-49c3-4e4e-b1c1-0dc5abb36c98\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.786993 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"fead550e-49c3-4e4e-b1c1-0dc5abb36c98\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.808797 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"fead550e-49c3-4e4e-b1c1-0dc5abb36c98\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.832711 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.902313 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jun 06 09:13:56 crc kubenswrapper[4911]: I0606 09:13:56.920902 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ms5n2"] Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.009123 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" event={"ID":"2654cb62-4a89-4c50-ab33-27c9273b7e82","Type":"ContainerStarted","Data":"ecf18920880a16ba431f6e01fac5f06653061e62220cdf428a674d1b41559f72"} Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.015027 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" event={"ID":"c36e9c0e-d7bb-4307-8767-c34651aeb7a8","Type":"ContainerDied","Data":"2bed004dff9831e699e3498b8d26f7fe8738aab3332d634385bbc28ea1857bc7"} Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.015073 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bed004dff9831e699e3498b8d26f7fe8738aab3332d634385bbc28ea1857bc7" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.015146 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.021280 4911 generic.go:334] "Generic (PLEG): container finished" podID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerID="af6b0d10ba7d600eabfa7c087157efb5c3111f963c4fd0d58aeb82bf9f054b6b" exitCode=0 Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.021499 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw8cr" event={"ID":"c994037f-6a9e-4e7e-82ee-e390d62354e0","Type":"ContainerDied","Data":"af6b0d10ba7d600eabfa7c087157efb5c3111f963c4fd0d58aeb82bf9f054b6b"} Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.021549 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw8cr" event={"ID":"c994037f-6a9e-4e7e-82ee-e390d62354e0","Type":"ContainerStarted","Data":"ba296a1ff876bb1ae0ff070e76df03975c38f307b7a57acd8b5094259ed345fe"} Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.023405 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.049580 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-5t4fd" podStartSLOduration=12.04956352 podStartE2EDuration="12.04956352s" podCreationTimestamp="2025-06-06 09:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:57.048854084 +0000 UTC m=+48.324279637" watchObservedRunningTime="2025-06-06 09:13:57.04956352 +0000 UTC m=+48.324989063" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.059937 4911 generic.go:334] "Generic (PLEG): container finished" podID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" containerID="ac2359254f9888a48857cf2ad2bbc25e6fbf287d7a3a5c725101746c0904d163" exitCode=0 Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.060029 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb7s6" event={"ID":"4d92a6a8-bc23-4b3e-959d-75741ac051f3","Type":"ContainerDied","Data":"ac2359254f9888a48857cf2ad2bbc25e6fbf287d7a3a5c725101746c0904d163"} Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.060055 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb7s6" event={"ID":"4d92a6a8-bc23-4b3e-959d-75741ac051f3","Type":"ContainerStarted","Data":"67907f81b43ce948a964c35097987125765537a916c7fe94e01dfa41189414b9"} Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.072431 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ms5n2" event={"ID":"c114b448-dfd7-45ec-8243-62fbdb0a8257","Type":"ContainerStarted","Data":"7e6b26f55ea763c5ab73c1804386a01efccaecd0d1445d3694fc85ac7cc019f9"} Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.087505 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8zkfh"] Jun 06 09:13:57 crc kubenswrapper[4911]: W0606 09:13:57.109382 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcfb5e53_2b1a_4a2d_8fd0_0299675ef90f.slice/crio-f5c65174ec3f00c6c2dc47e6d22679dfbc430141c6db2e26eef8b05b8e48e8ef WatchSource:0}: Error finding container 
f5c65174ec3f00c6c2dc47e6d22679dfbc430141c6db2e26eef8b05b8e48e8ef: Status 404 returned error can't find the container with id f5c65174ec3f00c6c2dc47e6d22679dfbc430141c6db2e26eef8b05b8e48e8ef Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.184973 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rm98f"] Jun 06 09:13:57 crc kubenswrapper[4911]: W0606 09:13:57.234493 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0c9e82b_5d39_44ae_b3fa_d96184d32d34.slice/crio-eec3aed3a53156a2f0f832887f3f044e7b1d6eee47db7364a10cd4ec07a3cad1 WatchSource:0}: Error finding container eec3aed3a53156a2f0f832887f3f044e7b1d6eee47db7364a10cd4ec07a3cad1: Status 404 returned error can't find the container with id eec3aed3a53156a2f0f832887f3f044e7b1d6eee47db7364a10cd4ec07a3cad1 Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.240562 4911 patch_prober.go:28] interesting pod/router-default-5444994796-bn7hn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jun 06 09:13:57 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld Jun 06 09:13:57 crc kubenswrapper[4911]: [+]process-running ok Jun 06 09:13:57 crc kubenswrapper[4911]: healthz check failed Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.240630 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bn7hn" podUID="d6a72352-dbe3-43f1-aae4-c3a6bb160ff1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.371494 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wb7fj" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.392570 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.392848 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.400710 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.400769 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.402693 4911 patch_prober.go:28] interesting pod/console-f9d7485db-4dcwp container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.402764 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-4dcwp" podUID="69127e92-f707-4b41-a690-9fd917998557" containerName="console" probeResult="failure" output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.406672 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.416922 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.418738 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.418842 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:57 crc kubenswrapper[4911]: W0606 09:13:57.428001 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podfead550e_49c3_4e4e_b1c1_0dc5abb36c98.slice/crio-b95cde05ffa5cfbc9ab18d644406b98c9c5a66941009ac55c410551bfd10a25c WatchSource:0}: Error finding container b95cde05ffa5cfbc9ab18d644406b98c9c5a66941009ac55c410551bfd10a25c: Status 404 returned error can't find the container with id b95cde05ffa5cfbc9ab18d644406b98c9c5a66941009ac55c410551bfd10a25c Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.429950 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.604237 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.609046 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.612192 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.612188 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.613305 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.723241 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.723372 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.824882 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.825026 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.825112 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.848628 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.950045 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jun 06 09:13:57 crc kubenswrapper[4911]: I0606 09:13:57.957756 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.079603 4911 patch_prober.go:28] interesting pod/downloads-7954f5f757-ftvnt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.079614 4911 patch_prober.go:28] interesting pod/downloads-7954f5f757-ftvnt container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.079656 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ftvnt" podUID="87f30b86-0303-493c-8919-e37e07f71709" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.079671 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-ftvnt" podUID="87f30b86-0303-493c-8919-e37e07f71709" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.086385 4911 generic.go:334] "Generic (PLEG): container finished" podID="c114b448-dfd7-45ec-8243-62fbdb0a8257" containerID="75739446b5efd283215533bb8c5ff3bdf6181b5044e2a718a5ec909cd39c3eec" exitCode=0 Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.086599 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ms5n2" event={"ID":"c114b448-dfd7-45ec-8243-62fbdb0a8257","Type":"ContainerDied","Data":"75739446b5efd283215533bb8c5ff3bdf6181b5044e2a718a5ec909cd39c3eec"} Jun 06 09:13:58 crc 
kubenswrapper[4911]: I0606 09:13:58.099220 4911 generic.go:334] "Generic (PLEG): container finished" podID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" containerID="ec722eea201cefce09244828b7264ace2d09020ea5439db1cbefc02e8394b2d4" exitCode=0 Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.099980 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rm98f" event={"ID":"c0c9e82b-5d39-44ae-b3fa-d96184d32d34","Type":"ContainerDied","Data":"ec722eea201cefce09244828b7264ace2d09020ea5439db1cbefc02e8394b2d4"} Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.100068 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rm98f" event={"ID":"c0c9e82b-5d39-44ae-b3fa-d96184d32d34","Type":"ContainerStarted","Data":"eec3aed3a53156a2f0f832887f3f044e7b1d6eee47db7364a10cd4ec07a3cad1"} Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.107060 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mnsl7"] Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.108556 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.111917 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fead550e-49c3-4e4e-b1c1-0dc5abb36c98","Type":"ContainerStarted","Data":"8b5f7d331fa85ed6f38322bd9a271d04ff5ae13f33a75a9ae895cd6bb69020c2"} Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.111994 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fead550e-49c3-4e4e-b1c1-0dc5abb36c98","Type":"ContainerStarted","Data":"b95cde05ffa5cfbc9ab18d644406b98c9c5a66941009ac55c410551bfd10a25c"} Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.114231 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.120671 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" event={"ID":"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f","Type":"ContainerStarted","Data":"0c53daa96c38e48f4177edb67a7631af5a633de7803534e2e5f6d8c10f55f998"} Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.120784 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" event={"ID":"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f","Type":"ContainerStarted","Data":"f5c65174ec3f00c6c2dc47e6d22679dfbc430141c6db2e26eef8b05b8e48e8ef"} Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.127639 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-82w5c" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.129514 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-rqvnb" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.130133 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mnsl7"] Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.187627 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.187603901 
podStartE2EDuration="2.187603901s" podCreationTimestamp="2025-06-06 09:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:58.177572014 +0000 UTC m=+49.452997567" watchObservedRunningTime="2025-06-06 09:13:58.187603901 +0000 UTC m=+49.463029444" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.215738 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.240345 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.241186 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-catalog-content\") pod \"redhat-marketplace-mnsl7\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.241274 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgqjj\" (UniqueName: \"kubernetes.io/projected/64277bb9-7100-46a8-b522-b8774320c4e1-kube-api-access-tgqjj\") pod \"redhat-marketplace-mnsl7\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.241358 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-utilities\") pod \"redhat-marketplace-mnsl7\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.255824 4911 patch_prober.go:28] interesting pod/router-default-5444994796-bn7hn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jun 06 09:13:58 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld Jun 06 09:13:58 crc kubenswrapper[4911]: [+]process-running ok Jun 06 09:13:58 crc kubenswrapper[4911]: healthz check failed Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.255873 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bn7hn" podUID="d6a72352-dbe3-43f1-aae4-c3a6bb160ff1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.281992 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" podStartSLOduration=27.281970051 podStartE2EDuration="27.281970051s" podCreationTimestamp="2025-06-06 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:58.241868866 +0000 UTC m=+49.517294419" watchObservedRunningTime="2025-06-06 09:13:58.281970051 +0000 UTC m=+49.557395594" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.344736 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-catalog-content\") pod \"redhat-marketplace-mnsl7\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.344787 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgqjj\" (UniqueName: \"kubernetes.io/projected/64277bb9-7100-46a8-b522-b8774320c4e1-kube-api-access-tgqjj\") pod \"redhat-marketplace-mnsl7\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.344829 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-utilities\") pod \"redhat-marketplace-mnsl7\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.346203 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-catalog-content\") pod \"redhat-marketplace-mnsl7\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.349379 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-utilities\") pod \"redhat-marketplace-mnsl7\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:13:58 crc kubenswrapper[4911]: E0606 09:13:58.356049 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:13:58 crc kubenswrapper[4911]: E0606 09:13:58.357745 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:13:58 crc kubenswrapper[4911]: E0606 09:13:58.383328 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:13:58 crc kubenswrapper[4911]: E0606 09:13:58.383402 4911 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" podUID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" containerName="kube-multus-additional-cni-plugins" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.386505 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgqjj\" (UniqueName: 
\"kubernetes.io/projected/64277bb9-7100-46a8-b522-b8774320c4e1-kube-api-access-tgqjj\") pod \"redhat-marketplace-mnsl7\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.452423 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.506467 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wlbkq"] Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.509410 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.529425 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wlbkq"] Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.650873 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-utilities\") pod \"redhat-marketplace-wlbkq\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.650965 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld9vv\" (UniqueName: \"kubernetes.io/projected/515444b6-48c5-4d0b-9877-7b5b936b7bd0-kube-api-access-ld9vv\") pod \"redhat-marketplace-wlbkq\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.651292 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-catalog-content\") pod \"redhat-marketplace-wlbkq\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.733060 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mnsl7"] Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.754370 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-utilities\") pod \"redhat-marketplace-wlbkq\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.754432 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld9vv\" (UniqueName: \"kubernetes.io/projected/515444b6-48c5-4d0b-9877-7b5b936b7bd0-kube-api-access-ld9vv\") pod \"redhat-marketplace-wlbkq\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.754487 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-catalog-content\") pod \"redhat-marketplace-wlbkq\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 
09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.755319 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-catalog-content\") pod \"redhat-marketplace-wlbkq\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.755397 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-utilities\") pod \"redhat-marketplace-wlbkq\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.782492 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld9vv\" (UniqueName: \"kubernetes.io/projected/515444b6-48c5-4d0b-9877-7b5b936b7bd0-kube-api-access-ld9vv\") pod \"redhat-marketplace-wlbkq\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:13:58 crc kubenswrapper[4911]: I0606 09:13:58.844618 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.114288 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nr4x4"] Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.115484 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.117747 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.120932 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wlbkq"] Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.124247 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nr4x4"] Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.152245 4911 generic.go:334] "Generic (PLEG): container finished" podID="fead550e-49c3-4e4e-b1c1-0dc5abb36c98" containerID="8b5f7d331fa85ed6f38322bd9a271d04ff5ae13f33a75a9ae895cd6bb69020c2" exitCode=0 Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.152391 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fead550e-49c3-4e4e-b1c1-0dc5abb36c98","Type":"ContainerDied","Data":"8b5f7d331fa85ed6f38322bd9a271d04ff5ae13f33a75a9ae895cd6bb69020c2"} Jun 06 09:13:59 crc kubenswrapper[4911]: W0606 09:13:59.154380 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod515444b6_48c5_4d0b_9877_7b5b936b7bd0.slice/crio-106f38c34279beb87c2c2a10e4f44a609ba17a83d34562bcedaa9132f9611a62 WatchSource:0}: Error finding container 106f38c34279beb87c2c2a10e4f44a609ba17a83d34562bcedaa9132f9611a62: Status 404 returned error can't find the container with id 106f38c34279beb87c2c2a10e4f44a609ba17a83d34562bcedaa9132f9611a62 Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.155701 4911 generic.go:334] "Generic (PLEG): container finished" podID="64277bb9-7100-46a8-b522-b8774320c4e1" 
containerID="62e369214b99aca6d560934de68c37dfcf7701d604e8c572beb33d3082039205" exitCode=0 Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.155764 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mnsl7" event={"ID":"64277bb9-7100-46a8-b522-b8774320c4e1","Type":"ContainerDied","Data":"62e369214b99aca6d560934de68c37dfcf7701d604e8c572beb33d3082039205"} Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.155791 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mnsl7" event={"ID":"64277bb9-7100-46a8-b522-b8774320c4e1","Type":"ContainerStarted","Data":"49d55ed4bdf311fbe36148988baf034b7b2254a35e918716934082e3f335ba68"} Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.165632 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7","Type":"ContainerStarted","Data":"006c6136f7ce70764999c4104c028dff46abb98265402112eddcd228e016054b"} Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.166740 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.166777 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7","Type":"ContainerStarted","Data":"a50a309406e2a5284613ea3fe0197281d3de5e423f1ed3c06985cf4513330f7f"} Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.235794 4911 patch_prober.go:28] interesting pod/router-default-5444994796-bn7hn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jun 06 09:13:59 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld Jun 06 09:13:59 crc kubenswrapper[4911]: [+]process-running ok Jun 06 09:13:59 crc kubenswrapper[4911]: healthz check failed Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.235979 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bn7hn" podUID="d6a72352-dbe3-43f1-aae4-c3a6bb160ff1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.239802 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.239788783 podStartE2EDuration="2.239788783s" podCreationTimestamp="2025-06-06 09:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:13:59.226241567 +0000 UTC m=+50.501667110" watchObservedRunningTime="2025-06-06 09:13:59.239788783 +0000 UTC m=+50.515214326" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.261488 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-catalog-content\") pod \"redhat-operators-nr4x4\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.261881 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-utilities\") pod \"redhat-operators-nr4x4\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.261930 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9pmz\" (UniqueName: \"kubernetes.io/projected/c0278664-45e1-4e8c-87fb-1674b538a207-kube-api-access-j9pmz\") pod \"redhat-operators-nr4x4\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.365299 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-utilities\") pod \"redhat-operators-nr4x4\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.365364 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9pmz\" (UniqueName: \"kubernetes.io/projected/c0278664-45e1-4e8c-87fb-1674b538a207-kube-api-access-j9pmz\") pod \"redhat-operators-nr4x4\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.365409 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-catalog-content\") pod \"redhat-operators-nr4x4\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.366353 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-catalog-content\") pod \"redhat-operators-nr4x4\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.366876 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-utilities\") pod \"redhat-operators-nr4x4\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.391925 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9pmz\" (UniqueName: \"kubernetes.io/projected/c0278664-45e1-4e8c-87fb-1674b538a207-kube-api-access-j9pmz\") pod \"redhat-operators-nr4x4\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.448414 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.540862 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pkxhs"] Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.558208 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.558777 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pkxhs"] Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.675999 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bv5d\" (UniqueName: \"kubernetes.io/projected/aa4023d1-b2f6-4276-86cf-baded9184c4f-kube-api-access-5bv5d\") pod \"redhat-operators-pkxhs\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.676498 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-utilities\") pod \"redhat-operators-pkxhs\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.676584 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-catalog-content\") pod \"redhat-operators-pkxhs\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.782697 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bv5d\" (UniqueName: \"kubernetes.io/projected/aa4023d1-b2f6-4276-86cf-baded9184c4f-kube-api-access-5bv5d\") pod \"redhat-operators-pkxhs\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.782782 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-utilities\") pod \"redhat-operators-pkxhs\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.782809 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-catalog-content\") pod \"redhat-operators-pkxhs\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.783453 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-catalog-content\") pod \"redhat-operators-pkxhs\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.783947 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-utilities\") pod \"redhat-operators-pkxhs\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.869436 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/redhat-operators-nr4x4"] Jun 06 09:13:59 crc kubenswrapper[4911]: I0606 09:13:59.903226 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bv5d\" (UniqueName: \"kubernetes.io/projected/aa4023d1-b2f6-4276-86cf-baded9184c4f-kube-api-access-5bv5d\") pod \"redhat-operators-pkxhs\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.181307 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nr4x4" event={"ID":"c0278664-45e1-4e8c-87fb-1674b538a207","Type":"ContainerStarted","Data":"b62b56fe1c217e8272d31085d4c692bcb5675ef2c1c9c23dd36dde0fb27e5cf7"} Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.183629 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wlbkq" event={"ID":"515444b6-48c5-4d0b-9877-7b5b936b7bd0","Type":"ContainerStarted","Data":"106f38c34279beb87c2c2a10e4f44a609ba17a83d34562bcedaa9132f9611a62"} Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.192357 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.241505 4911 patch_prober.go:28] interesting pod/router-default-5444994796-bn7hn container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jun 06 09:14:00 crc kubenswrapper[4911]: [-]has-synced failed: reason withheld Jun 06 09:14:00 crc kubenswrapper[4911]: [+]process-running ok Jun 06 09:14:00 crc kubenswrapper[4911]: healthz check failed Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.241575 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bn7hn" podUID="d6a72352-dbe3-43f1-aae4-c3a6bb160ff1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.468415 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.594253 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kubelet-dir\") pod \"fead550e-49c3-4e4e-b1c1-0dc5abb36c98\" (UID: \"fead550e-49c3-4e4e-b1c1-0dc5abb36c98\") " Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.594435 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kube-api-access\") pod \"fead550e-49c3-4e4e-b1c1-0dc5abb36c98\" (UID: \"fead550e-49c3-4e4e-b1c1-0dc5abb36c98\") " Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.594621 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "fead550e-49c3-4e4e-b1c1-0dc5abb36c98" (UID: "fead550e-49c3-4e4e-b1c1-0dc5abb36c98"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.594982 4911 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.603657 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "fead550e-49c3-4e4e-b1c1-0dc5abb36c98" (UID: "fead550e-49c3-4e4e-b1c1-0dc5abb36c98"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.695764 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fead550e-49c3-4e4e-b1c1-0dc5abb36c98-kube-api-access\") on node \"crc\" DevicePath \"\"" Jun 06 09:14:00 crc kubenswrapper[4911]: I0606 09:14:00.733296 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pkxhs"] Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.199585 4911 generic.go:334] "Generic (PLEG): container finished" podID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" containerID="4e2987e4da5116e3e36d5704479b7538d1bd7a326c0f87c31d2719ccb2689429" exitCode=0 Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.199720 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wlbkq" event={"ID":"515444b6-48c5-4d0b-9877-7b5b936b7bd0","Type":"ContainerDied","Data":"4e2987e4da5116e3e36d5704479b7538d1bd7a326c0f87c31d2719ccb2689429"} Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.208745 4911 generic.go:334] "Generic (PLEG): container finished" podID="aa4023d1-b2f6-4276-86cf-baded9184c4f" containerID="1c2911faef9b0a949b0e4ab5285620bea6c1227f397a9e0316e470224b1d7bfb" exitCode=0 Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.208820 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkxhs" event={"ID":"aa4023d1-b2f6-4276-86cf-baded9184c4f","Type":"ContainerDied","Data":"1c2911faef9b0a949b0e4ab5285620bea6c1227f397a9e0316e470224b1d7bfb"} Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.208849 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkxhs" event={"ID":"aa4023d1-b2f6-4276-86cf-baded9184c4f","Type":"ContainerStarted","Data":"ab74f891c2b40384fa130ae30685d437cdc4a276c06afc2ed876d4e1a06d32b1"} Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.216602 4911 generic.go:334] "Generic (PLEG): container finished" podID="6190a330-7a30-4d25-ac6d-0d4ac34ce0c7" containerID="006c6136f7ce70764999c4104c028dff46abb98265402112eddcd228e016054b" exitCode=0 Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.216740 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7","Type":"ContainerDied","Data":"006c6136f7ce70764999c4104c028dff46abb98265402112eddcd228e016054b"} Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.226071 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" 
event={"ID":"fead550e-49c3-4e4e-b1c1-0dc5abb36c98","Type":"ContainerDied","Data":"b95cde05ffa5cfbc9ab18d644406b98c9c5a66941009ac55c410551bfd10a25c"} Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.226136 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b95cde05ffa5cfbc9ab18d644406b98c9c5a66941009ac55c410551bfd10a25c" Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.226168 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.237564 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.242655 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-bn7hn" Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.248688 4911 generic.go:334] "Generic (PLEG): container finished" podID="c0278664-45e1-4e8c-87fb-1674b538a207" containerID="db71c70584a7fade3e07f719ff2af36586ce97ec2ea4d6a5fcc86e9103eeadde" exitCode=0 Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.248745 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nr4x4" event={"ID":"c0278664-45e1-4e8c-87fb-1674b538a207","Type":"ContainerDied","Data":"db71c70584a7fade3e07f719ff2af36586ce97ec2ea4d6a5fcc86e9103eeadde"} Jun 06 09:14:01 crc kubenswrapper[4911]: I0606 09:14:01.511243 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jun 06 09:14:02 crc kubenswrapper[4911]: I0606 09:14:02.537907 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jun 06 09:14:02 crc kubenswrapper[4911]: I0606 09:14:02.639927 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kubelet-dir\") pod \"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7\" (UID: \"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7\") " Jun 06 09:14:02 crc kubenswrapper[4911]: I0606 09:14:02.640069 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kube-api-access\") pod \"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7\" (UID: \"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7\") " Jun 06 09:14:02 crc kubenswrapper[4911]: I0606 09:14:02.642506 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "6190a330-7a30-4d25-ac6d-0d4ac34ce0c7" (UID: "6190a330-7a30-4d25-ac6d-0d4ac34ce0c7"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:14:02 crc kubenswrapper[4911]: I0606 09:14:02.659496 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "6190a330-7a30-4d25-ac6d-0d4ac34ce0c7" (UID: "6190a330-7a30-4d25-ac6d-0d4ac34ce0c7"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:14:02 crc kubenswrapper[4911]: I0606 09:14:02.742718 4911 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jun 06 09:14:02 crc kubenswrapper[4911]: I0606 09:14:02.742784 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6190a330-7a30-4d25-ac6d-0d4ac34ce0c7-kube-api-access\") on node \"crc\" DevicePath \"\"" Jun 06 09:14:03 crc kubenswrapper[4911]: I0606 09:14:03.269767 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6190a330-7a30-4d25-ac6d-0d4ac34ce0c7","Type":"ContainerDied","Data":"a50a309406e2a5284613ea3fe0197281d3de5e423f1ed3c06985cf4513330f7f"} Jun 06 09:14:03 crc kubenswrapper[4911]: I0606 09:14:03.269810 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a50a309406e2a5284613ea3fe0197281d3de5e423f1ed3c06985cf4513330f7f" Jun 06 09:14:03 crc kubenswrapper[4911]: I0606 09:14:03.269883 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jun 06 09:14:03 crc kubenswrapper[4911]: I0606 09:14:03.658145 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-xj4b2" Jun 06 09:14:05 crc kubenswrapper[4911]: I0606 09:14:05.230743 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jun 06 09:14:05 crc kubenswrapper[4911]: I0606 09:14:05.249907 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jun 06 09:14:07 crc kubenswrapper[4911]: I0606 09:14:07.423293 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:14:07 crc kubenswrapper[4911]: I0606 09:14:07.428772 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:14:07 crc kubenswrapper[4911]: I0606 09:14:07.445023 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=2.444490459 podStartE2EDuration="2.444490459s" podCreationTimestamp="2025-06-06 09:14:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:14:07.443959237 +0000 UTC m=+58.719384780" watchObservedRunningTime="2025-06-06 09:14:07.444490459 +0000 UTC m=+58.719916022" Jun 06 09:14:08 crc kubenswrapper[4911]: I0606 09:14:08.079934 4911 patch_prober.go:28] interesting pod/downloads-7954f5f757-ftvnt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jun 06 09:14:08 crc kubenswrapper[4911]: I0606 09:14:08.080008 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ftvnt" podUID="87f30b86-0303-493c-8919-e37e07f71709" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jun 06 
09:14:08 crc kubenswrapper[4911]: I0606 09:14:08.080043 4911 patch_prober.go:28] interesting pod/downloads-7954f5f757-ftvnt container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jun 06 09:14:08 crc kubenswrapper[4911]: I0606 09:14:08.080133 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-ftvnt" podUID="87f30b86-0303-493c-8919-e37e07f71709" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jun 06 09:14:08 crc kubenswrapper[4911]: E0606 09:14:08.355274 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:08 crc kubenswrapper[4911]: E0606 09:14:08.357268 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:08 crc kubenswrapper[4911]: E0606 09:14:08.359828 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:08 crc kubenswrapper[4911]: E0606 09:14:08.359889 4911 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" podUID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" containerName="kube-multus-additional-cni-plugins" Jun 06 09:14:16 crc kubenswrapper[4911]: I0606 09:14:16.791934 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:14:18 crc kubenswrapper[4911]: I0606 09:14:18.094756 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-ftvnt" Jun 06 09:14:18 crc kubenswrapper[4911]: E0606 09:14:18.355105 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:18 crc kubenswrapper[4911]: E0606 09:14:18.356840 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:18 crc kubenswrapper[4911]: E0606 09:14:18.358368 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc 
error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:18 crc kubenswrapper[4911]: E0606 09:14:18.358444 4911 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" podUID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" containerName="kube-multus-additional-cni-plugins" Jun 06 09:14:18 crc kubenswrapper[4911]: I0606 09:14:18.968922 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jun 06 09:14:19 crc kubenswrapper[4911]: I0606 09:14:19.968382 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=1.968362121 podStartE2EDuration="1.968362121s" podCreationTimestamp="2025-06-06 09:14:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:14:19.965484226 +0000 UTC m=+71.240909769" watchObservedRunningTime="2025-06-06 09:14:19.968362121 +0000 UTC m=+71.243787664" Jun 06 09:14:22 crc kubenswrapper[4911]: E0606 09:14:22.762974 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jun 06 09:14:22 crc kubenswrapper[4911]: E0606 09:14:22.764529 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p7kb8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-rm98f_openshift-marketplace(c0c9e82b-5d39-44ae-b3fa-d96184d32d34): ErrImagePull: rpc error: code = Canceled desc = 
copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jun 06 09:14:22 crc kubenswrapper[4911]: E0606 09:14:22.765779 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-rm98f" podUID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" Jun 06 09:14:26 crc kubenswrapper[4911]: I0606 09:14:26.407344 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-8pgq8_1d7622e9-e3cd-4bc9-921b-c5d7ef47f515/kube-multus-additional-cni-plugins/0.log" Jun 06 09:14:26 crc kubenswrapper[4911]: I0606 09:14:26.407586 4911 generic.go:334] "Generic (PLEG): container finished" podID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" exitCode=137 Jun 06 09:14:26 crc kubenswrapper[4911]: I0606 09:14:26.407619 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" event={"ID":"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515","Type":"ContainerDied","Data":"31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9"} Jun 06 09:14:28 crc kubenswrapper[4911]: E0606 09:14:28.352759 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:28 crc kubenswrapper[4911]: E0606 09:14:28.353647 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:28 crc kubenswrapper[4911]: E0606 09:14:28.354084 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:28 crc kubenswrapper[4911]: E0606 09:14:28.354133 4911 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" podUID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" containerName="kube-multus-additional-cni-plugins" Jun 06 09:14:28 crc kubenswrapper[4911]: I0606 09:14:28.565552 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tgj9w" Jun 06 09:14:28 crc kubenswrapper[4911]: I0606 09:14:28.754499 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-network-diagnostics/network-check-target-xd92c" Jun 06 09:14:31 crc kubenswrapper[4911]: E0606 09:14:31.259962 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-rm98f" podUID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" Jun 06 09:14:31 crc kubenswrapper[4911]: E0606 09:14:31.310349 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jun 06 09:14:31 crc kubenswrapper[4911]: E0606 09:14:31.310625 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tgqjj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-mnsl7_openshift-marketplace(64277bb9-7100-46a8-b522-b8774320c4e1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jun 06 09:14:31 crc kubenswrapper[4911]: E0606 09:14:31.311907 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-mnsl7" podUID="64277bb9-7100-46a8-b522-b8774320c4e1" Jun 06 09:14:31 crc kubenswrapper[4911]: E0606 09:14:31.939713 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jun 06 09:14:31 crc kubenswrapper[4911]: E0606 09:14:31.939882 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-29zrl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-hb7s6_openshift-marketplace(4d92a6a8-bc23-4b3e-959d-75741ac051f3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jun 06 09:14:31 crc kubenswrapper[4911]: E0606 09:14:31.941071 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-hb7s6" podUID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" Jun 06 09:14:38 crc kubenswrapper[4911]: E0606 09:14:38.352461 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:38 crc kubenswrapper[4911]: E0606 09:14:38.353643 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:38 crc kubenswrapper[4911]: E0606 09:14:38.354132 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f 
/ready/ready"] Jun 06 09:14:38 crc kubenswrapper[4911]: E0606 09:14:38.354191 4911 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" podUID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" containerName="kube-multus-additional-cni-plugins" Jun 06 09:14:48 crc kubenswrapper[4911]: E0606 09:14:48.351712 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:48 crc kubenswrapper[4911]: E0606 09:14:48.352800 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:48 crc kubenswrapper[4911]: E0606 09:14:48.353197 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" cmd=["/bin/bash","-c","test -f /ready/ready"] Jun 06 09:14:48 crc kubenswrapper[4911]: E0606 09:14:48.353259 4911 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" podUID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" containerName="kube-multus-additional-cni-plugins" Jun 06 09:14:50 crc kubenswrapper[4911]: E0606 09:14:50.474882 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jun 06 09:14:50 crc kubenswrapper[4911]: E0606 09:14:50.475526 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ld9vv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-wlbkq_openshift-marketplace(515444b6-48c5-4d0b-9877-7b5b936b7bd0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jun 06 09:14:50 crc kubenswrapper[4911]: E0606 09:14:50.476783 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-wlbkq" podUID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" Jun 06 09:14:50 crc kubenswrapper[4911]: E0606 09:14:50.565265 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jun 06 09:14:50 crc kubenswrapper[4911]: E0606 09:14:50.565443 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lqgf9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-ms5n2_openshift-marketplace(c114b448-dfd7-45ec-8243-62fbdb0a8257): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jun 06 09:14:50 crc kubenswrapper[4911]: E0606 09:14:50.566892 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-ms5n2" podUID="c114b448-dfd7-45ec-8243-62fbdb0a8257" Jun 06 09:14:50 crc kubenswrapper[4911]: E0606 09:14:50.896460 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jun 06 09:14:50 crc kubenswrapper[4911]: E0606 09:14:50.896619 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bmz58,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-xw8cr_openshift-marketplace(c994037f-6a9e-4e7e-82ee-e390d62354e0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jun 06 09:14:50 crc kubenswrapper[4911]: E0606 09:14:50.897797 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-xw8cr" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" Jun 06 09:14:56 crc kubenswrapper[4911]: E0606 09:14:56.257366 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-wlbkq" podUID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" Jun 06 09:14:56 crc kubenswrapper[4911]: E0606 09:14:56.257676 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-ms5n2" podUID="c114b448-dfd7-45ec-8243-62fbdb0a8257" Jun 06 09:14:56 crc kubenswrapper[4911]: E0606 09:14:56.260127 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-xw8cr" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" Jun 06 09:14:56 crc kubenswrapper[4911]: E0606 09:14:56.281922 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jun 06 09:14:56 crc kubenswrapper[4911]: E0606 09:14:56.282622 4911 kuberuntime_manager.go:1274] "Unhandled 
Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5bv5d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-pkxhs_openshift-marketplace(aa4023d1-b2f6-4276-86cf-baded9184c4f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jun 06 09:14:56 crc kubenswrapper[4911]: E0606 09:14:56.283983 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-pkxhs" podUID="aa4023d1-b2f6-4276-86cf-baded9184c4f" Jun 06 09:14:56 crc kubenswrapper[4911]: E0606 09:14:56.285816 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jun 06 09:14:56 crc kubenswrapper[4911]: E0606 09:14:56.285988 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j9pmz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-nr4x4_openshift-marketplace(c0278664-45e1-4e8c-87fb-1674b538a207): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jun 06 09:14:56 crc kubenswrapper[4911]: E0606 09:14:56.287173 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-nr4x4" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.306056 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-8pgq8_1d7622e9-e3cd-4bc9-921b-c5d7ef47f515/kube-multus-additional-cni-plugins/0.log" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.306148 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.358598 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-cni-sysctl-allowlist\") pod \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.358845 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4xhs\" (UniqueName: \"kubernetes.io/projected/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-kube-api-access-l4xhs\") pod \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.358982 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-tuning-conf-dir\") pod \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.359036 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-tuning-conf-dir" (OuterVolumeSpecName: "tuning-conf-dir") pod "1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" (UID: "1d7622e9-e3cd-4bc9-921b-c5d7ef47f515"). InnerVolumeSpecName "tuning-conf-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.359122 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-ready\") pod \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\" (UID: \"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515\") " Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.359506 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-ready" (OuterVolumeSpecName: "ready") pod "1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" (UID: "1d7622e9-e3cd-4bc9-921b-c5d7ef47f515"). InnerVolumeSpecName "ready". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.359506 4911 reconciler_common.go:293] "Volume detached for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-tuning-conf-dir\") on node \"crc\" DevicePath \"\"" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.359692 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" (UID: "1d7622e9-e3cd-4bc9-921b-c5d7ef47f515"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.366555 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-kube-api-access-l4xhs" (OuterVolumeSpecName: "kube-api-access-l4xhs") pod "1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" (UID: "1d7622e9-e3cd-4bc9-921b-c5d7ef47f515"). InnerVolumeSpecName "kube-api-access-l4xhs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.461088 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4xhs\" (UniqueName: \"kubernetes.io/projected/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-kube-api-access-l4xhs\") on node \"crc\" DevicePath \"\"" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.461214 4911 reconciler_common.go:293] "Volume detached for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-ready\") on node \"crc\" DevicePath \"\"" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.461236 4911 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.561978 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-8pgq8_1d7622e9-e3cd-4bc9-921b-c5d7ef47f515/kube-multus-additional-cni-plugins/0.log" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.562053 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" event={"ID":"1d7622e9-e3cd-4bc9-921b-c5d7ef47f515","Type":"ContainerDied","Data":"a17ac789870e1143711506905d9ed79525639f23ed2c7d7834688d1a9a8854c4"} Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.562143 4911 scope.go:117] "RemoveContainer" containerID="31ca9701d7afe5db605b6d72488f22bdca9e4fd8469bedcb76790aefec6342a9" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.562349 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-8pgq8" Jun 06 09:14:56 crc kubenswrapper[4911]: E0606 09:14:56.564128 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-pkxhs" podUID="aa4023d1-b2f6-4276-86cf-baded9184c4f" Jun 06 09:14:56 crc kubenswrapper[4911]: E0606 09:14:56.565339 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-nr4x4" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.625362 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-8pgq8"] Jun 06 09:14:56 crc kubenswrapper[4911]: I0606 09:14:56.628768 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-8pgq8"] Jun 06 09:14:57 crc kubenswrapper[4911]: I0606 09:14:57.956064 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" path="/var/lib/kubelet/pods/1d7622e9-e3cd-4bc9-921b-c5d7ef47f515/volumes" Jun 06 09:14:59 crc kubenswrapper[4911]: I0606 09:14:59.583302 4911 generic.go:334] "Generic (PLEG): container finished" podID="64277bb9-7100-46a8-b522-b8774320c4e1" containerID="a69c15a3e885cf81926c99284dde9c01f76ce8b0ead553bad2d5f17a6b63174a" exitCode=0 Jun 06 09:14:59 crc kubenswrapper[4911]: I0606 09:14:59.583408 4911 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/redhat-marketplace-mnsl7" event={"ID":"64277bb9-7100-46a8-b522-b8774320c4e1","Type":"ContainerDied","Data":"a69c15a3e885cf81926c99284dde9c01f76ce8b0ead553bad2d5f17a6b63174a"} Jun 06 09:14:59 crc kubenswrapper[4911]: I0606 09:14:59.587345 4911 generic.go:334] "Generic (PLEG): container finished" podID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" containerID="2148afa5f79cb36494879624344ba2aa847c7a4d954d5c3aea0ab3ed4f3e37af" exitCode=0 Jun 06 09:14:59 crc kubenswrapper[4911]: I0606 09:14:59.587405 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rm98f" event={"ID":"c0c9e82b-5d39-44ae-b3fa-d96184d32d34","Type":"ContainerDied","Data":"2148afa5f79cb36494879624344ba2aa847c7a4d954d5c3aea0ab3ed4f3e37af"} Jun 06 09:14:59 crc kubenswrapper[4911]: I0606 09:14:59.590480 4911 generic.go:334] "Generic (PLEG): container finished" podID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" containerID="b7513562b45533f69a5cb1c0c190db29cda5efe5181f45eef2b4ccca5b5b84c0" exitCode=0 Jun 06 09:14:59 crc kubenswrapper[4911]: I0606 09:14:59.590507 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb7s6" event={"ID":"4d92a6a8-bc23-4b3e-959d-75741ac051f3","Type":"ContainerDied","Data":"b7513562b45533f69a5cb1c0c190db29cda5efe5181f45eef2b4ccca5b5b84c0"} Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.144346 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn"] Jun 06 09:15:00 crc kubenswrapper[4911]: E0606 09:15:00.144676 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" containerName="kube-multus-additional-cni-plugins" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.144695 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" containerName="kube-multus-additional-cni-plugins" Jun 06 09:15:00 crc kubenswrapper[4911]: E0606 09:15:00.144720 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6190a330-7a30-4d25-ac6d-0d4ac34ce0c7" containerName="pruner" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.144727 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6190a330-7a30-4d25-ac6d-0d4ac34ce0c7" containerName="pruner" Jun 06 09:15:00 crc kubenswrapper[4911]: E0606 09:15:00.144739 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fead550e-49c3-4e4e-b1c1-0dc5abb36c98" containerName="pruner" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.144749 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fead550e-49c3-4e4e-b1c1-0dc5abb36c98" containerName="pruner" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.144898 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d7622e9-e3cd-4bc9-921b-c5d7ef47f515" containerName="kube-multus-additional-cni-plugins" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.144915 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6190a330-7a30-4d25-ac6d-0d4ac34ce0c7" containerName="pruner" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.144928 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="fead550e-49c3-4e4e-b1c1-0dc5abb36c98" containerName="pruner" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.145380 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.151472 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.151485 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.156197 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn"] Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.314420 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-secret-volume\") pod \"collect-profiles-29153355-cvwzn\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.314948 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-config-volume\") pod \"collect-profiles-29153355-cvwzn\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.315063 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s874m\" (UniqueName: \"kubernetes.io/projected/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-kube-api-access-s874m\") pod \"collect-profiles-29153355-cvwzn\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.416843 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s874m\" (UniqueName: \"kubernetes.io/projected/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-kube-api-access-s874m\") pod \"collect-profiles-29153355-cvwzn\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.416921 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-secret-volume\") pod \"collect-profiles-29153355-cvwzn\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.416946 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-config-volume\") pod \"collect-profiles-29153355-cvwzn\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.417840 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-config-volume\") pod 
\"collect-profiles-29153355-cvwzn\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.425972 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-secret-volume\") pod \"collect-profiles-29153355-cvwzn\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.436033 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s874m\" (UniqueName: \"kubernetes.io/projected/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-kube-api-access-s874m\") pod \"collect-profiles-29153355-cvwzn\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.470160 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.601620 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mnsl7" event={"ID":"64277bb9-7100-46a8-b522-b8774320c4e1","Type":"ContainerStarted","Data":"a4453c7991f177096ef665917199f165863cbd093aec21b58476a73b16a15014"} Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.609621 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb7s6" event={"ID":"4d92a6a8-bc23-4b3e-959d-75741ac051f3","Type":"ContainerStarted","Data":"70133e568d040b7873a4c0250fb97a6280d07b95d809d9fe86265dfd4f19699d"} Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.649981 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mnsl7" podStartSLOduration=1.638620256 podStartE2EDuration="1m2.649938744s" podCreationTimestamp="2025-06-06 09:13:58 +0000 UTC" firstStartedPulling="2025-06-06 09:13:59.157528916 +0000 UTC m=+50.432954459" lastFinishedPulling="2025-06-06 09:15:00.168847404 +0000 UTC m=+111.444272947" observedRunningTime="2025-06-06 09:15:00.62330186 +0000 UTC m=+111.898727413" watchObservedRunningTime="2025-06-06 09:15:00.649938744 +0000 UTC m=+111.925364287" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.655334 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hb7s6" podStartSLOduration=1.487906589 podStartE2EDuration="1m4.655297274s" podCreationTimestamp="2025-06-06 09:13:56 +0000 UTC" firstStartedPulling="2025-06-06 09:13:57.068900237 +0000 UTC m=+48.344325780" lastFinishedPulling="2025-06-06 09:15:00.236290922 +0000 UTC m=+111.511716465" observedRunningTime="2025-06-06 09:15:00.645998881 +0000 UTC m=+111.921424424" watchObservedRunningTime="2025-06-06 09:15:00.655297274 +0000 UTC m=+111.930722817" Jun 06 09:15:00 crc kubenswrapper[4911]: I0606 09:15:00.899008 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn"] Jun 06 09:15:01 crc kubenswrapper[4911]: I0606 09:15:01.618342 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rm98f" 
event={"ID":"c0c9e82b-5d39-44ae-b3fa-d96184d32d34","Type":"ContainerStarted","Data":"4dd241e360e79862deb496954cc347de6c781256242ba31a799d91ba038e01e2"} Jun 06 09:15:01 crc kubenswrapper[4911]: I0606 09:15:01.620338 4911 generic.go:334] "Generic (PLEG): container finished" podID="4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016" containerID="4e96101733ef9f53afcc5767ae8295ecd736ff7985e2ffe83b7e612a4f0ea6e4" exitCode=0 Jun 06 09:15:01 crc kubenswrapper[4911]: I0606 09:15:01.620556 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" event={"ID":"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016","Type":"ContainerDied","Data":"4e96101733ef9f53afcc5767ae8295ecd736ff7985e2ffe83b7e612a4f0ea6e4"} Jun 06 09:15:01 crc kubenswrapper[4911]: I0606 09:15:01.620695 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" event={"ID":"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016","Type":"ContainerStarted","Data":"8b35ff2f53a129d68e70cf4d9e77f93628ad95a82a01ba4c14602785d775b613"} Jun 06 09:15:01 crc kubenswrapper[4911]: I0606 09:15:01.642140 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rm98f" podStartSLOduration=3.359089629 podStartE2EDuration="1m5.642071635s" podCreationTimestamp="2025-06-06 09:13:56 +0000 UTC" firstStartedPulling="2025-06-06 09:13:58.103926952 +0000 UTC m=+49.379352495" lastFinishedPulling="2025-06-06 09:15:00.386908958 +0000 UTC m=+111.662334501" observedRunningTime="2025-06-06 09:15:01.639711554 +0000 UTC m=+112.915137117" watchObservedRunningTime="2025-06-06 09:15:01.642071635 +0000 UTC m=+112.917497188" Jun 06 09:15:02 crc kubenswrapper[4911]: I0606 09:15:02.877958 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.054489 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-secret-volume\") pod \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.054607 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-config-volume\") pod \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.054846 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s874m\" (UniqueName: \"kubernetes.io/projected/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-kube-api-access-s874m\") pod \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\" (UID: \"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016\") " Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.055761 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-config-volume" (OuterVolumeSpecName: "config-volume") pod "4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016" (UID: "4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.062009 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016" (UID: "4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.062119 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-kube-api-access-s874m" (OuterVolumeSpecName: "kube-api-access-s874m") pod "4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016" (UID: "4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016"). InnerVolumeSpecName "kube-api-access-s874m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.156650 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s874m\" (UniqueName: \"kubernetes.io/projected/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-kube-api-access-s874m\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.156703 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-secret-volume\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.156713 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016-config-volume\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.636855 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" event={"ID":"4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016","Type":"ContainerDied","Data":"8b35ff2f53a129d68e70cf4d9e77f93628ad95a82a01ba4c14602785d775b613"} Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.636915 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b35ff2f53a129d68e70cf4d9e77f93628ad95a82a01ba4c14602785d775b613" Jun 06 09:15:03 crc kubenswrapper[4911]: I0606 09:15:03.636968 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn" Jun 06 09:15:06 crc kubenswrapper[4911]: I0606 09:15:06.438888 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:15:06 crc kubenswrapper[4911]: I0606 09:15:06.440497 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:15:06 crc kubenswrapper[4911]: I0606 09:15:06.638253 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:15:06 crc kubenswrapper[4911]: I0606 09:15:06.688187 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:15:06 crc kubenswrapper[4911]: I0606 09:15:06.833921 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:15:06 crc kubenswrapper[4911]: I0606 09:15:06.833998 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:15:06 crc kubenswrapper[4911]: I0606 09:15:06.874912 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:15:07 crc kubenswrapper[4911]: I0606 09:15:07.694150 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:15:08 crc kubenswrapper[4911]: I0606 09:15:08.452594 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:15:08 crc kubenswrapper[4911]: I0606 09:15:08.452678 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:15:08 crc kubenswrapper[4911]: I0606 09:15:08.497672 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:15:08 crc kubenswrapper[4911]: I0606 09:15:08.703013 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:15:09 crc kubenswrapper[4911]: I0606 09:15:09.677066 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rm98f"] Jun 06 09:15:09 crc kubenswrapper[4911]: I0606 09:15:09.677729 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rm98f" podUID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" containerName="registry-server" containerID="cri-o://4dd241e360e79862deb496954cc347de6c781256242ba31a799d91ba038e01e2" gracePeriod=2 Jun 06 09:15:11 crc kubenswrapper[4911]: I0606 09:15:11.688240 4911 generic.go:334] "Generic (PLEG): container finished" podID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" containerID="4dd241e360e79862deb496954cc347de6c781256242ba31a799d91ba038e01e2" exitCode=0 Jun 06 09:15:11 crc kubenswrapper[4911]: I0606 09:15:11.688664 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rm98f" event={"ID":"c0c9e82b-5d39-44ae-b3fa-d96184d32d34","Type":"ContainerDied","Data":"4dd241e360e79862deb496954cc347de6c781256242ba31a799d91ba038e01e2"} Jun 06 09:15:13 crc kubenswrapper[4911]: I0606 
09:15:13.792395 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:15:13 crc kubenswrapper[4911]: I0606 09:15:13.926894 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-catalog-content\") pod \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " Jun 06 09:15:13 crc kubenswrapper[4911]: I0606 09:15:13.926967 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7kb8\" (UniqueName: \"kubernetes.io/projected/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-kube-api-access-p7kb8\") pod \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " Jun 06 09:15:13 crc kubenswrapper[4911]: I0606 09:15:13.927078 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-utilities\") pod \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\" (UID: \"c0c9e82b-5d39-44ae-b3fa-d96184d32d34\") " Jun 06 09:15:13 crc kubenswrapper[4911]: I0606 09:15:13.929212 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-utilities" (OuterVolumeSpecName: "utilities") pod "c0c9e82b-5d39-44ae-b3fa-d96184d32d34" (UID: "c0c9e82b-5d39-44ae-b3fa-d96184d32d34"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:15:13 crc kubenswrapper[4911]: I0606 09:15:13.958233 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-kube-api-access-p7kb8" (OuterVolumeSpecName: "kube-api-access-p7kb8") pod "c0c9e82b-5d39-44ae-b3fa-d96184d32d34" (UID: "c0c9e82b-5d39-44ae-b3fa-d96184d32d34"). InnerVolumeSpecName "kube-api-access-p7kb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:15:13 crc kubenswrapper[4911]: I0606 09:15:13.974071 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0c9e82b-5d39-44ae-b3fa-d96184d32d34" (UID: "c0c9e82b-5d39-44ae-b3fa-d96184d32d34"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.028827 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.028898 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.028937 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7kb8\" (UniqueName: \"kubernetes.io/projected/c0c9e82b-5d39-44ae-b3fa-d96184d32d34-kube-api-access-p7kb8\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.706964 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkxhs" event={"ID":"aa4023d1-b2f6-4276-86cf-baded9184c4f","Type":"ContainerStarted","Data":"9e2b9158561f4553b2ca40c88440407ae9ef765069e282446f5436bad2fc1269"} Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.710596 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rm98f" event={"ID":"c0c9e82b-5d39-44ae-b3fa-d96184d32d34","Type":"ContainerDied","Data":"eec3aed3a53156a2f0f832887f3f044e7b1d6eee47db7364a10cd4ec07a3cad1"} Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.710685 4911 scope.go:117] "RemoveContainer" containerID="4dd241e360e79862deb496954cc347de6c781256242ba31a799d91ba038e01e2" Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.710889 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rm98f" Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.715164 4911 generic.go:334] "Generic (PLEG): container finished" podID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerID="8d976b2bcc1e0ecbd596cfd59d48b7ca96a9bfa61216a61d6c777236aff24a52" exitCode=0 Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.715236 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw8cr" event={"ID":"c994037f-6a9e-4e7e-82ee-e390d62354e0","Type":"ContainerDied","Data":"8d976b2bcc1e0ecbd596cfd59d48b7ca96a9bfa61216a61d6c777236aff24a52"} Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.717433 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nr4x4" event={"ID":"c0278664-45e1-4e8c-87fb-1674b538a207","Type":"ContainerStarted","Data":"d129a3351b5c5c45fec902aded51ea00632d3b7d2b710dd569affa282a2c5b95"} Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.722801 4911 generic.go:334] "Generic (PLEG): container finished" podID="c114b448-dfd7-45ec-8243-62fbdb0a8257" containerID="e210a84fd85a11cc0d1256a25c12e1b9241a961dcf6f31de9f589d2009731ac6" exitCode=0 Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.722892 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ms5n2" event={"ID":"c114b448-dfd7-45ec-8243-62fbdb0a8257","Type":"ContainerDied","Data":"e210a84fd85a11cc0d1256a25c12e1b9241a961dcf6f31de9f589d2009731ac6"} Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.734538 4911 generic.go:334] "Generic (PLEG): container finished" podID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" containerID="a5d3986eebabe987c5d189c3e0beefe83d88bcb07b7944c85e0331b5f6ee1635" exitCode=0 Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.734957 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wlbkq" event={"ID":"515444b6-48c5-4d0b-9877-7b5b936b7bd0","Type":"ContainerDied","Data":"a5d3986eebabe987c5d189c3e0beefe83d88bcb07b7944c85e0331b5f6ee1635"} Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.825594 4911 scope.go:117] "RemoveContainer" containerID="2148afa5f79cb36494879624344ba2aa847c7a4d954d5c3aea0ab3ed4f3e37af" Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.851642 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rm98f"] Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.856338 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rm98f"] Jun 06 09:15:14 crc kubenswrapper[4911]: I0606 09:15:14.865224 4911 scope.go:117] "RemoveContainer" containerID="ec722eea201cefce09244828b7264ace2d09020ea5439db1cbefc02e8394b2d4" Jun 06 09:15:15 crc kubenswrapper[4911]: I0606 09:15:15.744972 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw8cr" event={"ID":"c994037f-6a9e-4e7e-82ee-e390d62354e0","Type":"ContainerStarted","Data":"5b44cfa8660718291257d8fb31d036c0915a5d7839c5ecf523e69c59e79ea909"} Jun 06 09:15:15 crc kubenswrapper[4911]: I0606 09:15:15.749711 4911 generic.go:334] "Generic (PLEG): container finished" podID="c0278664-45e1-4e8c-87fb-1674b538a207" containerID="d129a3351b5c5c45fec902aded51ea00632d3b7d2b710dd569affa282a2c5b95" exitCode=0 Jun 06 09:15:15 crc kubenswrapper[4911]: I0606 09:15:15.749854 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-nr4x4" event={"ID":"c0278664-45e1-4e8c-87fb-1674b538a207","Type":"ContainerDied","Data":"d129a3351b5c5c45fec902aded51ea00632d3b7d2b710dd569affa282a2c5b95"} Jun 06 09:15:15 crc kubenswrapper[4911]: I0606 09:15:15.753657 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ms5n2" event={"ID":"c114b448-dfd7-45ec-8243-62fbdb0a8257","Type":"ContainerStarted","Data":"03b3c3b468d498af73854d244b43f076b172f71bb88c5a1b218cf490c356c8d5"} Jun 06 09:15:15 crc kubenswrapper[4911]: I0606 09:15:15.758577 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wlbkq" event={"ID":"515444b6-48c5-4d0b-9877-7b5b936b7bd0","Type":"ContainerStarted","Data":"8e91488de53172d4ae4ec23f62f6e39e6686ae6bb6cf4d871bee3a3e8f3020fd"} Jun 06 09:15:15 crc kubenswrapper[4911]: I0606 09:15:15.762519 4911 generic.go:334] "Generic (PLEG): container finished" podID="aa4023d1-b2f6-4276-86cf-baded9184c4f" containerID="9e2b9158561f4553b2ca40c88440407ae9ef765069e282446f5436bad2fc1269" exitCode=0 Jun 06 09:15:15 crc kubenswrapper[4911]: I0606 09:15:15.762591 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkxhs" event={"ID":"aa4023d1-b2f6-4276-86cf-baded9184c4f","Type":"ContainerDied","Data":"9e2b9158561f4553b2ca40c88440407ae9ef765069e282446f5436bad2fc1269"} Jun 06 09:15:15 crc kubenswrapper[4911]: I0606 09:15:15.782729 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xw8cr" podStartSLOduration=2.593661265 podStartE2EDuration="1m20.782699621s" podCreationTimestamp="2025-06-06 09:13:55 +0000 UTC" firstStartedPulling="2025-06-06 09:13:57.023124143 +0000 UTC m=+48.298549686" lastFinishedPulling="2025-06-06 09:15:15.212162489 +0000 UTC m=+126.487588042" observedRunningTime="2025-06-06 09:15:15.777565817 +0000 UTC m=+127.052991380" watchObservedRunningTime="2025-06-06 09:15:15.782699621 +0000 UTC m=+127.058125164" Jun 06 09:15:15 crc kubenswrapper[4911]: I0606 09:15:15.844334 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ms5n2" podStartSLOduration=2.6034784589999997 podStartE2EDuration="1m19.844310907s" podCreationTimestamp="2025-06-06 09:13:56 +0000 UTC" firstStartedPulling="2025-06-06 09:13:58.093755012 +0000 UTC m=+49.369180555" lastFinishedPulling="2025-06-06 09:15:15.33458747 +0000 UTC m=+126.610013003" observedRunningTime="2025-06-06 09:15:15.842037918 +0000 UTC m=+127.117463471" watchObservedRunningTime="2025-06-06 09:15:15.844310907 +0000 UTC m=+127.119736450" Jun 06 09:15:15 crc kubenswrapper[4911]: I0606 09:15:15.864541 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wlbkq" podStartSLOduration=3.765520519 podStartE2EDuration="1m17.864515674s" podCreationTimestamp="2025-06-06 09:13:58 +0000 UTC" firstStartedPulling="2025-06-06 09:14:01.207346479 +0000 UTC m=+52.482772032" lastFinishedPulling="2025-06-06 09:15:15.306341644 +0000 UTC m=+126.581767187" observedRunningTime="2025-06-06 09:15:15.86205824 +0000 UTC m=+127.137483803" watchObservedRunningTime="2025-06-06 09:15:15.864515674 +0000 UTC m=+127.139941217" Jun 06 09:15:15 crc kubenswrapper[4911]: I0606 09:15:15.956146 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" 
path="/var/lib/kubelet/pods/c0c9e82b-5d39-44ae-b3fa-d96184d32d34/volumes" Jun 06 09:15:16 crc kubenswrapper[4911]: I0606 09:15:16.226980 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:15:16 crc kubenswrapper[4911]: I0606 09:15:16.227243 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:15:16 crc kubenswrapper[4911]: I0606 09:15:16.640492 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:15:16 crc kubenswrapper[4911]: I0606 09:15:16.641249 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:15:16 crc kubenswrapper[4911]: I0606 09:15:16.687184 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:15:16 crc kubenswrapper[4911]: I0606 09:15:16.773743 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nr4x4" event={"ID":"c0278664-45e1-4e8c-87fb-1674b538a207","Type":"ContainerStarted","Data":"fac91e958a62f67e60e866efc77b98479ead1b3c5d0388f69943133cdb78acf1"} Jun 06 09:15:16 crc kubenswrapper[4911]: I0606 09:15:16.778773 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkxhs" event={"ID":"aa4023d1-b2f6-4276-86cf-baded9184c4f","Type":"ContainerStarted","Data":"f51a6383d18358b098f0359f4b011afef061c5e6b6ef9d26cbf0489138f78a0c"} Jun 06 09:15:16 crc kubenswrapper[4911]: I0606 09:15:16.838943 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pkxhs" podStartSLOduration=2.827203035 podStartE2EDuration="1m17.838915143s" podCreationTimestamp="2025-06-06 09:13:59 +0000 UTC" firstStartedPulling="2025-06-06 09:14:01.210826808 +0000 UTC m=+52.486252351" lastFinishedPulling="2025-06-06 09:15:16.222538926 +0000 UTC m=+127.497964459" observedRunningTime="2025-06-06 09:15:16.835951866 +0000 UTC m=+128.111377429" watchObservedRunningTime="2025-06-06 09:15:16.838915143 +0000 UTC m=+128.114340686" Jun 06 09:15:16 crc kubenswrapper[4911]: I0606 09:15:16.840892 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nr4x4" podStartSLOduration=2.714135518 podStartE2EDuration="1m17.840883254s" podCreationTimestamp="2025-06-06 09:13:59 +0000 UTC" firstStartedPulling="2025-06-06 09:14:01.253543032 +0000 UTC m=+52.528968585" lastFinishedPulling="2025-06-06 09:15:16.380290778 +0000 UTC m=+127.655716321" observedRunningTime="2025-06-06 09:15:16.806221111 +0000 UTC m=+128.081646644" watchObservedRunningTime="2025-06-06 09:15:16.840883254 +0000 UTC m=+128.116308797" Jun 06 09:15:17 crc kubenswrapper[4911]: I0606 09:15:17.282190 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-xw8cr" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerName="registry-server" probeResult="failure" output=< Jun 06 09:15:17 crc kubenswrapper[4911]: timeout: failed to connect service ":50051" within 1s Jun 06 09:15:17 crc kubenswrapper[4911]: > Jun 06 09:15:18 crc kubenswrapper[4911]: I0606 09:15:18.845184 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:15:18 crc 
kubenswrapper[4911]: I0606 09:15:18.845576 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:15:18 crc kubenswrapper[4911]: I0606 09:15:18.885440 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:15:19 crc kubenswrapper[4911]: I0606 09:15:19.449348 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:15:19 crc kubenswrapper[4911]: I0606 09:15:19.449813 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:15:20 crc kubenswrapper[4911]: I0606 09:15:20.193629 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:15:20 crc kubenswrapper[4911]: I0606 09:15:20.194028 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:15:20 crc kubenswrapper[4911]: I0606 09:15:20.232451 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:15:20 crc kubenswrapper[4911]: I0606 09:15:20.499996 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nr4x4" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" containerName="registry-server" probeResult="failure" output=< Jun 06 09:15:20 crc kubenswrapper[4911]: timeout: failed to connect service ":50051" within 1s Jun 06 09:15:20 crc kubenswrapper[4911]: > Jun 06 09:15:26 crc kubenswrapper[4911]: I0606 09:15:26.279442 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:15:26 crc kubenswrapper[4911]: I0606 09:15:26.326329 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:15:26 crc kubenswrapper[4911]: I0606 09:15:26.679628 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:15:28 crc kubenswrapper[4911]: I0606 09:15:28.310386 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ms5n2"] Jun 06 09:15:28 crc kubenswrapper[4911]: I0606 09:15:28.310923 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ms5n2" podUID="c114b448-dfd7-45ec-8243-62fbdb0a8257" containerName="registry-server" containerID="cri-o://03b3c3b468d498af73854d244b43f076b172f71bb88c5a1b218cf490c356c8d5" gracePeriod=2 Jun 06 09:15:28 crc kubenswrapper[4911]: I0606 09:15:28.889995 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:15:29 crc kubenswrapper[4911]: I0606 09:15:29.502412 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:15:29 crc kubenswrapper[4911]: I0606 09:15:29.544466 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:15:29 crc kubenswrapper[4911]: I0606 09:15:29.685285 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-authentication/oauth-openshift-558db77b4-z2z69"] Jun 06 09:15:29 crc kubenswrapper[4911]: I0606 09:15:29.858732 4911 generic.go:334] "Generic (PLEG): container finished" podID="c114b448-dfd7-45ec-8243-62fbdb0a8257" containerID="03b3c3b468d498af73854d244b43f076b172f71bb88c5a1b218cf490c356c8d5" exitCode=0 Jun 06 09:15:29 crc kubenswrapper[4911]: I0606 09:15:29.858834 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ms5n2" event={"ID":"c114b448-dfd7-45ec-8243-62fbdb0a8257","Type":"ContainerDied","Data":"03b3c3b468d498af73854d244b43f076b172f71bb88c5a1b218cf490c356c8d5"} Jun 06 09:15:30 crc kubenswrapper[4911]: I0606 09:15:30.265085 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:15:30 crc kubenswrapper[4911]: I0606 09:15:30.955670 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.032853 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-utilities\") pod \"c114b448-dfd7-45ec-8243-62fbdb0a8257\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.032968 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-catalog-content\") pod \"c114b448-dfd7-45ec-8243-62fbdb0a8257\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.033243 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqgf9\" (UniqueName: \"kubernetes.io/projected/c114b448-dfd7-45ec-8243-62fbdb0a8257-kube-api-access-lqgf9\") pod \"c114b448-dfd7-45ec-8243-62fbdb0a8257\" (UID: \"c114b448-dfd7-45ec-8243-62fbdb0a8257\") " Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.033763 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-utilities" (OuterVolumeSpecName: "utilities") pod "c114b448-dfd7-45ec-8243-62fbdb0a8257" (UID: "c114b448-dfd7-45ec-8243-62fbdb0a8257"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.034686 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.062745 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c114b448-dfd7-45ec-8243-62fbdb0a8257-kube-api-access-lqgf9" (OuterVolumeSpecName: "kube-api-access-lqgf9") pod "c114b448-dfd7-45ec-8243-62fbdb0a8257" (UID: "c114b448-dfd7-45ec-8243-62fbdb0a8257"). InnerVolumeSpecName "kube-api-access-lqgf9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.069285 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c114b448-dfd7-45ec-8243-62fbdb0a8257" (UID: "c114b448-dfd7-45ec-8243-62fbdb0a8257"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.135615 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c114b448-dfd7-45ec-8243-62fbdb0a8257-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.135655 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqgf9\" (UniqueName: \"kubernetes.io/projected/c114b448-dfd7-45ec-8243-62fbdb0a8257-kube-api-access-lqgf9\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.314342 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wlbkq"] Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.314657 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wlbkq" podUID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" containerName="registry-server" containerID="cri-o://8e91488de53172d4ae4ec23f62f6e39e6686ae6bb6cf4d871bee3a3e8f3020fd" gracePeriod=2 Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.881586 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ms5n2" event={"ID":"c114b448-dfd7-45ec-8243-62fbdb0a8257","Type":"ContainerDied","Data":"7e6b26f55ea763c5ab73c1804386a01efccaecd0d1445d3694fc85ac7cc019f9"} Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.882158 4911 scope.go:117] "RemoveContainer" containerID="03b3c3b468d498af73854d244b43f076b172f71bb88c5a1b218cf490c356c8d5" Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.881611 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ms5n2" Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.884078 4911 generic.go:334] "Generic (PLEG): container finished" podID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" containerID="8e91488de53172d4ae4ec23f62f6e39e6686ae6bb6cf4d871bee3a3e8f3020fd" exitCode=0 Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.884130 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wlbkq" event={"ID":"515444b6-48c5-4d0b-9877-7b5b936b7bd0","Type":"ContainerDied","Data":"8e91488de53172d4ae4ec23f62f6e39e6686ae6bb6cf4d871bee3a3e8f3020fd"} Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.898032 4911 scope.go:117] "RemoveContainer" containerID="e210a84fd85a11cc0d1256a25c12e1b9241a961dcf6f31de9f589d2009731ac6" Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.953400 4911 scope.go:117] "RemoveContainer" containerID="75739446b5efd283215533bb8c5ff3bdf6181b5044e2a718a5ec909cd39c3eec" Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.958607 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ms5n2"] Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.958671 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ms5n2"] Jun 06 09:15:31 crc kubenswrapper[4911]: I0606 09:15:31.972521 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.049614 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-utilities\") pod \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.050174 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-catalog-content\") pod \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.050340 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ld9vv\" (UniqueName: \"kubernetes.io/projected/515444b6-48c5-4d0b-9877-7b5b936b7bd0-kube-api-access-ld9vv\") pod \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\" (UID: \"515444b6-48c5-4d0b-9877-7b5b936b7bd0\") " Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.050756 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-utilities" (OuterVolumeSpecName: "utilities") pod "515444b6-48c5-4d0b-9877-7b5b936b7bd0" (UID: "515444b6-48c5-4d0b-9877-7b5b936b7bd0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.050933 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.055467 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/515444b6-48c5-4d0b-9877-7b5b936b7bd0-kube-api-access-ld9vv" (OuterVolumeSpecName: "kube-api-access-ld9vv") pod "515444b6-48c5-4d0b-9877-7b5b936b7bd0" (UID: "515444b6-48c5-4d0b-9877-7b5b936b7bd0"). InnerVolumeSpecName "kube-api-access-ld9vv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.061475 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "515444b6-48c5-4d0b-9877-7b5b936b7bd0" (UID: "515444b6-48c5-4d0b-9877-7b5b936b7bd0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.152601 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/515444b6-48c5-4d0b-9877-7b5b936b7bd0-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.152672 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ld9vv\" (UniqueName: \"kubernetes.io/projected/515444b6-48c5-4d0b-9877-7b5b936b7bd0-kube-api-access-ld9vv\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.712300 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pkxhs"] Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.712688 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pkxhs" podUID="aa4023d1-b2f6-4276-86cf-baded9184c4f" containerName="registry-server" containerID="cri-o://f51a6383d18358b098f0359f4b011afef061c5e6b6ef9d26cbf0489138f78a0c" gracePeriod=2 Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.898663 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wlbkq" event={"ID":"515444b6-48c5-4d0b-9877-7b5b936b7bd0","Type":"ContainerDied","Data":"106f38c34279beb87c2c2a10e4f44a609ba17a83d34562bcedaa9132f9611a62"} Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.898751 4911 scope.go:117] "RemoveContainer" containerID="8e91488de53172d4ae4ec23f62f6e39e6686ae6bb6cf4d871bee3a3e8f3020fd" Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.898912 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wlbkq" Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.903815 4911 generic.go:334] "Generic (PLEG): container finished" podID="aa4023d1-b2f6-4276-86cf-baded9184c4f" containerID="f51a6383d18358b098f0359f4b011afef061c5e6b6ef9d26cbf0489138f78a0c" exitCode=0 Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.903878 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkxhs" event={"ID":"aa4023d1-b2f6-4276-86cf-baded9184c4f","Type":"ContainerDied","Data":"f51a6383d18358b098f0359f4b011afef061c5e6b6ef9d26cbf0489138f78a0c"} Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.918967 4911 scope.go:117] "RemoveContainer" containerID="a5d3986eebabe987c5d189c3e0beefe83d88bcb07b7944c85e0331b5f6ee1635" Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.943210 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wlbkq"] Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.946880 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wlbkq"] Jun 06 09:15:32 crc kubenswrapper[4911]: I0606 09:15:32.985300 4911 scope.go:117] "RemoveContainer" containerID="4e2987e4da5116e3e36d5704479b7538d1bd7a326c0f87c31d2719ccb2689429" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.113040 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.172507 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-catalog-content\") pod \"aa4023d1-b2f6-4276-86cf-baded9184c4f\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.172637 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bv5d\" (UniqueName: \"kubernetes.io/projected/aa4023d1-b2f6-4276-86cf-baded9184c4f-kube-api-access-5bv5d\") pod \"aa4023d1-b2f6-4276-86cf-baded9184c4f\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.172819 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-utilities\") pod \"aa4023d1-b2f6-4276-86cf-baded9184c4f\" (UID: \"aa4023d1-b2f6-4276-86cf-baded9184c4f\") " Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.173806 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-utilities" (OuterVolumeSpecName: "utilities") pod "aa4023d1-b2f6-4276-86cf-baded9184c4f" (UID: "aa4023d1-b2f6-4276-86cf-baded9184c4f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.178440 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa4023d1-b2f6-4276-86cf-baded9184c4f-kube-api-access-5bv5d" (OuterVolumeSpecName: "kube-api-access-5bv5d") pod "aa4023d1-b2f6-4276-86cf-baded9184c4f" (UID: "aa4023d1-b2f6-4276-86cf-baded9184c4f"). InnerVolumeSpecName "kube-api-access-5bv5d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.231114 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aa4023d1-b2f6-4276-86cf-baded9184c4f" (UID: "aa4023d1-b2f6-4276-86cf-baded9184c4f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.274466 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bv5d\" (UniqueName: \"kubernetes.io/projected/aa4023d1-b2f6-4276-86cf-baded9184c4f-kube-api-access-5bv5d\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.274581 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.274595 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa4023d1-b2f6-4276-86cf-baded9184c4f-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.915948 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pkxhs" event={"ID":"aa4023d1-b2f6-4276-86cf-baded9184c4f","Type":"ContainerDied","Data":"ab74f891c2b40384fa130ae30685d437cdc4a276c06afc2ed876d4e1a06d32b1"} Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.916016 4911 scope.go:117] "RemoveContainer" containerID="f51a6383d18358b098f0359f4b011afef061c5e6b6ef9d26cbf0489138f78a0c" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.916218 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pkxhs" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.934503 4911 scope.go:117] "RemoveContainer" containerID="9e2b9158561f4553b2ca40c88440407ae9ef765069e282446f5436bad2fc1269" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.962828 4911 scope.go:117] "RemoveContainer" containerID="1c2911faef9b0a949b0e4ab5285620bea6c1227f397a9e0316e470224b1d7bfb" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.972540 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" path="/var/lib/kubelet/pods/515444b6-48c5-4d0b-9877-7b5b936b7bd0/volumes" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.973278 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c114b448-dfd7-45ec-8243-62fbdb0a8257" path="/var/lib/kubelet/pods/c114b448-dfd7-45ec-8243-62fbdb0a8257/volumes" Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.973899 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pkxhs"] Jun 06 09:15:33 crc kubenswrapper[4911]: I0606 09:15:33.973948 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pkxhs"] Jun 06 09:15:35 crc kubenswrapper[4911]: I0606 09:15:35.956005 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa4023d1-b2f6-4276-86cf-baded9184c4f" path="/var/lib/kubelet/pods/aa4023d1-b2f6-4276-86cf-baded9184c4f/volumes" Jun 06 09:15:54 crc kubenswrapper[4911]: I0606 09:15:54.301037 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:15:54 crc kubenswrapper[4911]: I0606 09:15:54.301736 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:15:54 crc kubenswrapper[4911]: I0606 09:15:54.720268 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" podUID="4ae50abd-4f8f-495f-8d6e-34f8b2b19711" containerName="oauth-openshift" containerID="cri-o://fe27d21344612727a47f9002a7c9feb7c7d7b984a8b2ab914531950dc2f8510e" gracePeriod=15 Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.037136 4911 generic.go:334] "Generic (PLEG): container finished" podID="4ae50abd-4f8f-495f-8d6e-34f8b2b19711" containerID="fe27d21344612727a47f9002a7c9feb7c7d7b984a8b2ab914531950dc2f8510e" exitCode=0 Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.037235 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" event={"ID":"4ae50abd-4f8f-495f-8d6e-34f8b2b19711","Type":"ContainerDied","Data":"fe27d21344612727a47f9002a7c9feb7c7d7b984a8b2ab914531950dc2f8510e"} Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.094239 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.126578 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5dc57f868f-6k7cf"] Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.126805 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa4023d1-b2f6-4276-86cf-baded9184c4f" containerName="extract-utilities" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.126822 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa4023d1-b2f6-4276-86cf-baded9184c4f" containerName="extract-utilities" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.126832 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" containerName="extract-content" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.126841 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" containerName="extract-content" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.126853 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ae50abd-4f8f-495f-8d6e-34f8b2b19711" containerName="oauth-openshift" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.126862 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ae50abd-4f8f-495f-8d6e-34f8b2b19711" containerName="oauth-openshift" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.126872 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" containerName="extract-utilities" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.126879 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" containerName="extract-utilities" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.126889 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.126896 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.126908 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c114b448-dfd7-45ec-8243-62fbdb0a8257" containerName="extract-utilities" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.126916 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c114b448-dfd7-45ec-8243-62fbdb0a8257" containerName="extract-utilities" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.126928 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016" containerName="collect-profiles" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.126936 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016" containerName="collect-profiles" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.126946 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa4023d1-b2f6-4276-86cf-baded9184c4f" containerName="extract-content" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.126955 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa4023d1-b2f6-4276-86cf-baded9184c4f" containerName="extract-content" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.126967 4911 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" containerName="extract-content" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.126974 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" containerName="extract-content" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.126985 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c114b448-dfd7-45ec-8243-62fbdb0a8257" containerName="extract-content" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.126992 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c114b448-dfd7-45ec-8243-62fbdb0a8257" containerName="extract-content" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.127000 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" containerName="extract-utilities" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.127008 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" containerName="extract-utilities" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.127022 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa4023d1-b2f6-4276-86cf-baded9184c4f" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.127029 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa4023d1-b2f6-4276-86cf-baded9184c4f" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.127038 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c114b448-dfd7-45ec-8243-62fbdb0a8257" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.127045 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c114b448-dfd7-45ec-8243-62fbdb0a8257" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: E0606 09:15:55.127056 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.127064 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.127225 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa4023d1-b2f6-4276-86cf-baded9184c4f" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.127247 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016" containerName="collect-profiles" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.127259 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0c9e82b-5d39-44ae-b3fa-d96184d32d34" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.127269 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="515444b6-48c5-4d0b-9877-7b5b936b7bd0" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.127282 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c114b448-dfd7-45ec-8243-62fbdb0a8257" containerName="registry-server" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.127294 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ae50abd-4f8f-495f-8d6e-34f8b2b19711" containerName="oauth-openshift" Jun 06 09:15:55 crc 
kubenswrapper[4911]: I0606 09:15:55.127727 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.137446 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5dc57f868f-6k7cf"] Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167287 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-serving-cert\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167406 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-login\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167433 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-service-ca\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167459 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-policies\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167475 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-session\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167506 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-provider-selection\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167548 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-dir\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167576 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-router-certs\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167606 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-error\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167627 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-ocp-branding-template\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167665 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-idp-0-file-data\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167691 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2666\" (UniqueName: \"kubernetes.io/projected/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-kube-api-access-r2666\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167716 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-cliconfig\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167736 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-trusted-ca-bundle\") pod \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\" (UID: \"4ae50abd-4f8f-495f-8d6e-34f8b2b19711\") " Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167887 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-session\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167933 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-audit-policies\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.167979 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc 
kubenswrapper[4911]: I0606 09:15:55.168006 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-router-certs\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168030 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168060 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlvvf\" (UniqueName: \"kubernetes.io/projected/65b6d4c8-305a-4be7-a97b-5a54dd415178-kube-api-access-xlvvf\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168114 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-template-login\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168157 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168188 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-service-ca\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168214 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168240 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-template-error\") pod 
\"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168372 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168486 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/65b6d4c8-305a-4be7-a97b-5a54dd415178-audit-dir\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168538 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168559 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168641 4911 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-dir\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168843 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.169018 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.168720 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.172660 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.174303 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.174489 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.174629 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.174830 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.175030 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.175462 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.175480 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-kube-api-access-r2666" (OuterVolumeSpecName: "kube-api-access-r2666") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "kube-api-access-r2666". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.175904 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.175993 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "4ae50abd-4f8f-495f-8d6e-34f8b2b19711" (UID: "4ae50abd-4f8f-495f-8d6e-34f8b2b19711"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269391 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/65b6d4c8-305a-4be7-a97b-5a54dd415178-audit-dir\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269438 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269463 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269495 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-session\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269529 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-audit-policies\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269535 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/65b6d4c8-305a-4be7-a97b-5a54dd415178-audit-dir\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269563 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269589 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269609 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-router-certs\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269639 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlvvf\" (UniqueName: \"kubernetes.io/projected/65b6d4c8-305a-4be7-a97b-5a54dd415178-kube-api-access-xlvvf\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269679 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-template-login\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269706 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269731 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-service-ca\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269754 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269774 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-template-error\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269819 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269833 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269845 4911 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-audit-policies\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269860 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269873 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269887 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269900 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269913 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269927 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269941 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2666\" (UniqueName: \"kubernetes.io/projected/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-kube-api-access-r2666\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269955 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269967 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.269983 4911 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4ae50abd-4f8f-495f-8d6e-34f8b2b19711-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.270683 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-audit-policies\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.270949 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-service-ca\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.270949 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.271070 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.273154 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-session\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.273335 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.273845 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-template-login\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.274142 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-template-error\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.274338 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.274502 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-router-certs\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.274778 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.274783 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/65b6d4c8-305a-4be7-a97b-5a54dd415178-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.289182 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlvvf\" (UniqueName: \"kubernetes.io/projected/65b6d4c8-305a-4be7-a97b-5a54dd415178-kube-api-access-xlvvf\") pod \"oauth-openshift-5dc57f868f-6k7cf\" (UID: \"65b6d4c8-305a-4be7-a97b-5a54dd415178\") " pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.442890 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:55 crc kubenswrapper[4911]: I0606 09:15:55.883525 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5dc57f868f-6k7cf"] Jun 06 09:15:56 crc kubenswrapper[4911]: I0606 09:15:56.045317 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" event={"ID":"65b6d4c8-305a-4be7-a97b-5a54dd415178","Type":"ContainerStarted","Data":"70b7dfd5dcf4c9a9d9fe4f28ab88a37d4db83d9279df61537157fa800c427ce5"} Jun 06 09:15:56 crc kubenswrapper[4911]: I0606 09:15:56.046994 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" event={"ID":"4ae50abd-4f8f-495f-8d6e-34f8b2b19711","Type":"ContainerDied","Data":"548fc523b7628bb1e56761dd1527c5a2271570df886538e1584efa1cf38a2de6"} Jun 06 09:15:56 crc kubenswrapper[4911]: I0606 09:15:56.047037 4911 scope.go:117] "RemoveContainer" containerID="fe27d21344612727a47f9002a7c9feb7c7d7b984a8b2ab914531950dc2f8510e" Jun 06 09:15:56 crc kubenswrapper[4911]: I0606 09:15:56.047076 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-z2z69" Jun 06 09:15:56 crc kubenswrapper[4911]: I0606 09:15:56.084745 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-z2z69"] Jun 06 09:15:56 crc kubenswrapper[4911]: I0606 09:15:56.087816 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-z2z69"] Jun 06 09:15:57 crc kubenswrapper[4911]: I0606 09:15:57.056446 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" event={"ID":"65b6d4c8-305a-4be7-a97b-5a54dd415178","Type":"ContainerStarted","Data":"87ee4464ae135f46714dc1b8774140265fb207317a6636f4ed05247b32215df3"} Jun 06 09:15:57 crc kubenswrapper[4911]: I0606 09:15:57.058049 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:57 crc kubenswrapper[4911]: I0606 09:15:57.064155 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" Jun 06 09:15:57 crc kubenswrapper[4911]: I0606 09:15:57.079085 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5dc57f868f-6k7cf" podStartSLOduration=28.079065431 podStartE2EDuration="28.079065431s" podCreationTimestamp="2025-06-06 09:15:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:15:57.077730182 +0000 UTC m=+168.353155745" watchObservedRunningTime="2025-06-06 09:15:57.079065431 +0000 UTC m=+168.354490964" Jun 06 09:15:57 crc kubenswrapper[4911]: I0606 09:15:57.955636 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ae50abd-4f8f-495f-8d6e-34f8b2b19711" path="/var/lib/kubelet/pods/4ae50abd-4f8f-495f-8d6e-34f8b2b19711/volumes" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:21.995446 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xw8cr"] Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:21.996981 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xw8cr" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerName="registry-server" containerID="cri-o://5b44cfa8660718291257d8fb31d036c0915a5d7839c5ecf523e69c59e79ea909" gracePeriod=30 Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.009599 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hb7s6"] Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.009984 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hb7s6" podUID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" containerName="registry-server" containerID="cri-o://70133e568d040b7873a4c0250fb97a6280d07b95d809d9fe86265dfd4f19699d" gracePeriod=30 Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.016745 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-64hwv"] Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.023978 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mnsl7"] Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.024713 4911 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mnsl7" podUID="64277bb9-7100-46a8-b522-b8774320c4e1" containerName="registry-server" containerID="cri-o://a4453c7991f177096ef665917199f165863cbd093aec21b58476a73b16a15014" gracePeriod=30 Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.030429 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nr4x4"] Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.031205 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nr4x4" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" containerName="registry-server" containerID="cri-o://fac91e958a62f67e60e866efc77b98479ead1b3c5d0388f69943133cdb78acf1" gracePeriod=30 Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.044579 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r6jnl"] Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.045813 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.049219 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r6jnl"] Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.206281 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/18686ff3-8800-4c67-b287-5989dd4dd44e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r6jnl\" (UID: \"18686ff3-8800-4c67-b287-5989dd4dd44e\") " pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.206398 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18686ff3-8800-4c67-b287-5989dd4dd44e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r6jnl\" (UID: \"18686ff3-8800-4c67-b287-5989dd4dd44e\") " pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.206450 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkpv5\" (UniqueName: \"kubernetes.io/projected/18686ff3-8800-4c67-b287-5989dd4dd44e-kube-api-access-tkpv5\") pod \"marketplace-operator-79b997595-r6jnl\" (UID: \"18686ff3-8800-4c67-b287-5989dd4dd44e\") " pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.206995 4911 generic.go:334] "Generic (PLEG): container finished" podID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerID="5b44cfa8660718291257d8fb31d036c0915a5d7839c5ecf523e69c59e79ea909" exitCode=0 Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.207078 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw8cr" event={"ID":"c994037f-6a9e-4e7e-82ee-e390d62354e0","Type":"ContainerDied","Data":"5b44cfa8660718291257d8fb31d036c0915a5d7839c5ecf523e69c59e79ea909"} Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.211065 4911 generic.go:334] "Generic (PLEG): container finished" podID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" 
containerID="70133e568d040b7873a4c0250fb97a6280d07b95d809d9fe86265dfd4f19699d" exitCode=0 Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.211137 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb7s6" event={"ID":"4d92a6a8-bc23-4b3e-959d-75741ac051f3","Type":"ContainerDied","Data":"70133e568d040b7873a4c0250fb97a6280d07b95d809d9fe86265dfd4f19699d"} Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.215222 4911 generic.go:334] "Generic (PLEG): container finished" podID="c0278664-45e1-4e8c-87fb-1674b538a207" containerID="fac91e958a62f67e60e866efc77b98479ead1b3c5d0388f69943133cdb78acf1" exitCode=0 Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.215339 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nr4x4" event={"ID":"c0278664-45e1-4e8c-87fb-1674b538a207","Type":"ContainerDied","Data":"fac91e958a62f67e60e866efc77b98479ead1b3c5d0388f69943133cdb78acf1"} Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.219620 4911 generic.go:334] "Generic (PLEG): container finished" podID="64277bb9-7100-46a8-b522-b8774320c4e1" containerID="a4453c7991f177096ef665917199f165863cbd093aec21b58476a73b16a15014" exitCode=0 Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.219675 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mnsl7" event={"ID":"64277bb9-7100-46a8-b522-b8774320c4e1","Type":"ContainerDied","Data":"a4453c7991f177096ef665917199f165863cbd093aec21b58476a73b16a15014"} Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.219917 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" podUID="573af29b-3e41-4b58-aec9-8bbfe7845920" containerName="marketplace-operator" containerID="cri-o://a5160a7dd73b764858005aa3892acaa300753976b3bf4515b7ed373e86379dfb" gracePeriod=30 Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.308199 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/18686ff3-8800-4c67-b287-5989dd4dd44e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r6jnl\" (UID: \"18686ff3-8800-4c67-b287-5989dd4dd44e\") " pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.308323 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18686ff3-8800-4c67-b287-5989dd4dd44e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r6jnl\" (UID: \"18686ff3-8800-4c67-b287-5989dd4dd44e\") " pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.308373 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkpv5\" (UniqueName: \"kubernetes.io/projected/18686ff3-8800-4c67-b287-5989dd4dd44e-kube-api-access-tkpv5\") pod \"marketplace-operator-79b997595-r6jnl\" (UID: \"18686ff3-8800-4c67-b287-5989dd4dd44e\") " pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.311231 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18686ff3-8800-4c67-b287-5989dd4dd44e-marketplace-trusted-ca\") pod 
\"marketplace-operator-79b997595-r6jnl\" (UID: \"18686ff3-8800-4c67-b287-5989dd4dd44e\") " pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.316914 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/18686ff3-8800-4c67-b287-5989dd4dd44e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r6jnl\" (UID: \"18686ff3-8800-4c67-b287-5989dd4dd44e\") " pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.335174 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkpv5\" (UniqueName: \"kubernetes.io/projected/18686ff3-8800-4c67-b287-5989dd4dd44e-kube-api-access-tkpv5\") pod \"marketplace-operator-79b997595-r6jnl\" (UID: \"18686ff3-8800-4c67-b287-5989dd4dd44e\") " pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.377211 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.620810 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r6jnl"] Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.628752 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.639227 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.644274 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.660461 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.816245 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-catalog-content\") pod \"c994037f-6a9e-4e7e-82ee-e390d62354e0\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.817247 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-catalog-content\") pod \"64277bb9-7100-46a8-b522-b8774320c4e1\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.817361 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-utilities\") pod \"c0278664-45e1-4e8c-87fb-1674b538a207\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.817441 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-utilities\") pod \"c994037f-6a9e-4e7e-82ee-e390d62354e0\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.817595 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgqjj\" (UniqueName: \"kubernetes.io/projected/64277bb9-7100-46a8-b522-b8774320c4e1-kube-api-access-tgqjj\") pod \"64277bb9-7100-46a8-b522-b8774320c4e1\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.817708 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-29zrl\" (UniqueName: \"kubernetes.io/projected/4d92a6a8-bc23-4b3e-959d-75741ac051f3-kube-api-access-29zrl\") pod \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.817821 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-utilities\") pod \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.818194 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9pmz\" (UniqueName: \"kubernetes.io/projected/c0278664-45e1-4e8c-87fb-1674b538a207-kube-api-access-j9pmz\") pod \"c0278664-45e1-4e8c-87fb-1674b538a207\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.818324 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-catalog-content\") pod \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\" (UID: \"4d92a6a8-bc23-4b3e-959d-75741ac051f3\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.818425 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-catalog-content\") pod 
\"c0278664-45e1-4e8c-87fb-1674b538a207\" (UID: \"c0278664-45e1-4e8c-87fb-1674b538a207\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.819347 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-utilities\") pod \"64277bb9-7100-46a8-b522-b8774320c4e1\" (UID: \"64277bb9-7100-46a8-b522-b8774320c4e1\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.819449 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmz58\" (UniqueName: \"kubernetes.io/projected/c994037f-6a9e-4e7e-82ee-e390d62354e0-kube-api-access-bmz58\") pod \"c994037f-6a9e-4e7e-82ee-e390d62354e0\" (UID: \"c994037f-6a9e-4e7e-82ee-e390d62354e0\") " Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.818050 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-utilities" (OuterVolumeSpecName: "utilities") pod "c0278664-45e1-4e8c-87fb-1674b538a207" (UID: "c0278664-45e1-4e8c-87fb-1674b538a207"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.818543 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-utilities" (OuterVolumeSpecName: "utilities") pod "c994037f-6a9e-4e7e-82ee-e390d62354e0" (UID: "c994037f-6a9e-4e7e-82ee-e390d62354e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.819045 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-utilities" (OuterVolumeSpecName: "utilities") pod "4d92a6a8-bc23-4b3e-959d-75741ac051f3" (UID: "4d92a6a8-bc23-4b3e-959d-75741ac051f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.820310 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-utilities" (OuterVolumeSpecName: "utilities") pod "64277bb9-7100-46a8-b522-b8774320c4e1" (UID: "64277bb9-7100-46a8-b522-b8774320c4e1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.820640 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.820713 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.820730 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.825314 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c994037f-6a9e-4e7e-82ee-e390d62354e0-kube-api-access-bmz58" (OuterVolumeSpecName: "kube-api-access-bmz58") pod "c994037f-6a9e-4e7e-82ee-e390d62354e0" (UID: "c994037f-6a9e-4e7e-82ee-e390d62354e0"). InnerVolumeSpecName "kube-api-access-bmz58". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.825375 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0278664-45e1-4e8c-87fb-1674b538a207-kube-api-access-j9pmz" (OuterVolumeSpecName: "kube-api-access-j9pmz") pod "c0278664-45e1-4e8c-87fb-1674b538a207" (UID: "c0278664-45e1-4e8c-87fb-1674b538a207"). InnerVolumeSpecName "kube-api-access-j9pmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.825511 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64277bb9-7100-46a8-b522-b8774320c4e1-kube-api-access-tgqjj" (OuterVolumeSpecName: "kube-api-access-tgqjj") pod "64277bb9-7100-46a8-b522-b8774320c4e1" (UID: "64277bb9-7100-46a8-b522-b8774320c4e1"). InnerVolumeSpecName "kube-api-access-tgqjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.825469 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d92a6a8-bc23-4b3e-959d-75741ac051f3-kube-api-access-29zrl" (OuterVolumeSpecName: "kube-api-access-29zrl") pod "4d92a6a8-bc23-4b3e-959d-75741ac051f3" (UID: "4d92a6a8-bc23-4b3e-959d-75741ac051f3"). InnerVolumeSpecName "kube-api-access-29zrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.831707 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "64277bb9-7100-46a8-b522-b8774320c4e1" (UID: "64277bb9-7100-46a8-b522-b8774320c4e1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.856626 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c994037f-6a9e-4e7e-82ee-e390d62354e0" (UID: "c994037f-6a9e-4e7e-82ee-e390d62354e0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.881511 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d92a6a8-bc23-4b3e-959d-75741ac051f3" (UID: "4d92a6a8-bc23-4b3e-959d-75741ac051f3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.891056 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0278664-45e1-4e8c-87fb-1674b538a207" (UID: "c0278664-45e1-4e8c-87fb-1674b538a207"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.922215 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.922306 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmz58\" (UniqueName: \"kubernetes.io/projected/c994037f-6a9e-4e7e-82ee-e390d62354e0-kube-api-access-bmz58\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.922324 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c994037f-6a9e-4e7e-82ee-e390d62354e0-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.922339 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64277bb9-7100-46a8-b522-b8774320c4e1-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.922352 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgqjj\" (UniqueName: \"kubernetes.io/projected/64277bb9-7100-46a8-b522-b8774320c4e1-kube-api-access-tgqjj\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.922365 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-29zrl\" (UniqueName: \"kubernetes.io/projected/4d92a6a8-bc23-4b3e-959d-75741ac051f3-kube-api-access-29zrl\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.922379 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9pmz\" (UniqueName: \"kubernetes.io/projected/c0278664-45e1-4e8c-87fb-1674b538a207-kube-api-access-j9pmz\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.922391 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d92a6a8-bc23-4b3e-959d-75741ac051f3-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:22 crc kubenswrapper[4911]: I0606 09:16:22.922404 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0278664-45e1-4e8c-87fb-1674b538a207-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.229352 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mnsl7" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.229360 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mnsl7" event={"ID":"64277bb9-7100-46a8-b522-b8774320c4e1","Type":"ContainerDied","Data":"49d55ed4bdf311fbe36148988baf034b7b2254a35e918716934082e3f335ba68"} Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.229444 4911 scope.go:117] "RemoveContainer" containerID="a4453c7991f177096ef665917199f165863cbd093aec21b58476a73b16a15014" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.235120 4911 generic.go:334] "Generic (PLEG): container finished" podID="573af29b-3e41-4b58-aec9-8bbfe7845920" containerID="a5160a7dd73b764858005aa3892acaa300753976b3bf4515b7ed373e86379dfb" exitCode=0 Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.235237 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" event={"ID":"573af29b-3e41-4b58-aec9-8bbfe7845920","Type":"ContainerDied","Data":"a5160a7dd73b764858005aa3892acaa300753976b3bf4515b7ed373e86379dfb"} Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.242071 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw8cr" event={"ID":"c994037f-6a9e-4e7e-82ee-e390d62354e0","Type":"ContainerDied","Data":"ba296a1ff876bb1ae0ff070e76df03975c38f307b7a57acd8b5094259ed345fe"} Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.242156 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xw8cr" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.249846 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hb7s6" event={"ID":"4d92a6a8-bc23-4b3e-959d-75741ac051f3","Type":"ContainerDied","Data":"67907f81b43ce948a964c35097987125765537a916c7fe94e01dfa41189414b9"} Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.249997 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hb7s6" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.258800 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nr4x4" event={"ID":"c0278664-45e1-4e8c-87fb-1674b538a207","Type":"ContainerDied","Data":"b62b56fe1c217e8272d31085d4c692bcb5675ef2c1c9c23dd36dde0fb27e5cf7"} Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.258907 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nr4x4" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.263385 4911 scope.go:117] "RemoveContainer" containerID="a69c15a3e885cf81926c99284dde9c01f76ce8b0ead553bad2d5f17a6b63174a" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.267138 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mnsl7"] Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.272394 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mnsl7"] Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.273002 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" event={"ID":"18686ff3-8800-4c67-b287-5989dd4dd44e","Type":"ContainerStarted","Data":"ace69305a24dd8e97dacfc07678b33475ab08733f447050f50d7a42c166b21c3"} Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.273043 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" event={"ID":"18686ff3-8800-4c67-b287-5989dd4dd44e","Type":"ContainerStarted","Data":"a16bceca64e237dc9248ad4b1c375323903aa694ecd62b0062094d8784697457"} Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.273284 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.276221 4911 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-r6jnl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.56:8080/healthz\": dial tcp 10.217.0.56:8080: connect: connection refused" start-of-body= Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.276273 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" podUID="18686ff3-8800-4c67-b287-5989dd4dd44e" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.56:8080/healthz\": dial tcp 10.217.0.56:8080: connect: connection refused" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.278333 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xw8cr"] Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.282022 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xw8cr"] Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.311018 4911 scope.go:117] "RemoveContainer" containerID="62e369214b99aca6d560934de68c37dfcf7701d604e8c572beb33d3082039205" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.322255 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" podStartSLOduration=1.322221383 podStartE2EDuration="1.322221383s" podCreationTimestamp="2025-06-06 09:16:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:16:23.318676602 +0000 UTC m=+194.594102145" watchObservedRunningTime="2025-06-06 09:16:23.322221383 +0000 UTC m=+194.597646926" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.341365 4911 scope.go:117] "RemoveContainer" containerID="5b44cfa8660718291257d8fb31d036c0915a5d7839c5ecf523e69c59e79ea909" Jun 06 09:16:23 crc kubenswrapper[4911]: 
I0606 09:16:23.352990 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nr4x4"] Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.370280 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nr4x4"] Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.375553 4911 scope.go:117] "RemoveContainer" containerID="8d976b2bcc1e0ecbd596cfd59d48b7ca96a9bfa61216a61d6c777236aff24a52" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.377638 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hb7s6"] Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.381170 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hb7s6"] Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.399800 4911 scope.go:117] "RemoveContainer" containerID="af6b0d10ba7d600eabfa7c087157efb5c3111f963c4fd0d58aeb82bf9f054b6b" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.437239 4911 scope.go:117] "RemoveContainer" containerID="70133e568d040b7873a4c0250fb97a6280d07b95d809d9fe86265dfd4f19699d" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.455945 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.463899 4911 scope.go:117] "RemoveContainer" containerID="b7513562b45533f69a5cb1c0c190db29cda5efe5181f45eef2b4ccca5b5b84c0" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.483489 4911 scope.go:117] "RemoveContainer" containerID="ac2359254f9888a48857cf2ad2bbc25e6fbf287d7a3a5c725101746c0904d163" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.501065 4911 scope.go:117] "RemoveContainer" containerID="fac91e958a62f67e60e866efc77b98479ead1b3c5d0388f69943133cdb78acf1" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.516639 4911 scope.go:117] "RemoveContainer" containerID="d129a3351b5c5c45fec902aded51ea00632d3b7d2b710dd569affa282a2c5b95" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.536775 4911 scope.go:117] "RemoveContainer" containerID="db71c70584a7fade3e07f719ff2af36586ce97ec2ea4d6a5fcc86e9103eeadde" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.632322 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqpww\" (UniqueName: \"kubernetes.io/projected/573af29b-3e41-4b58-aec9-8bbfe7845920-kube-api-access-bqpww\") pod \"573af29b-3e41-4b58-aec9-8bbfe7845920\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.632446 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-trusted-ca\") pod \"573af29b-3e41-4b58-aec9-8bbfe7845920\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.632507 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-operator-metrics\") pod \"573af29b-3e41-4b58-aec9-8bbfe7845920\" (UID: \"573af29b-3e41-4b58-aec9-8bbfe7845920\") " Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.633488 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "573af29b-3e41-4b58-aec9-8bbfe7845920" (UID: "573af29b-3e41-4b58-aec9-8bbfe7845920"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.636796 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "573af29b-3e41-4b58-aec9-8bbfe7845920" (UID: "573af29b-3e41-4b58-aec9-8bbfe7845920"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.637155 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/573af29b-3e41-4b58-aec9-8bbfe7845920-kube-api-access-bqpww" (OuterVolumeSpecName: "kube-api-access-bqpww") pod "573af29b-3e41-4b58-aec9-8bbfe7845920" (UID: "573af29b-3e41-4b58-aec9-8bbfe7845920"). InnerVolumeSpecName "kube-api-access-bqpww". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.734452 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqpww\" (UniqueName: \"kubernetes.io/projected/573af29b-3e41-4b58-aec9-8bbfe7845920-kube-api-access-bqpww\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.734534 4911 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.734546 4911 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/573af29b-3e41-4b58-aec9-8bbfe7845920-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.955978 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" path="/var/lib/kubelet/pods/4d92a6a8-bc23-4b3e-959d-75741ac051f3/volumes" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.956838 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64277bb9-7100-46a8-b522-b8774320c4e1" path="/var/lib/kubelet/pods/64277bb9-7100-46a8-b522-b8774320c4e1/volumes" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.957611 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" path="/var/lib/kubelet/pods/c0278664-45e1-4e8c-87fb-1674b538a207/volumes" Jun 06 09:16:23 crc kubenswrapper[4911]: I0606 09:16:23.958886 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" path="/var/lib/kubelet/pods/c994037f-6a9e-4e7e-82ee-e390d62354e0/volumes" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.222874 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p29wq"] Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223237 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" containerName="extract-utilities" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 
09:16:24.223263 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" containerName="extract-utilities" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223277 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" containerName="extract-utilities" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223286 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" containerName="extract-utilities" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223296 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerName="extract-utilities" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223308 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerName="extract-utilities" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223319 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" containerName="extract-content" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223329 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" containerName="extract-content" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223339 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223348 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223358 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" containerName="extract-content" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223365 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" containerName="extract-content" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223375 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64277bb9-7100-46a8-b522-b8774320c4e1" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223381 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="64277bb9-7100-46a8-b522-b8774320c4e1" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223394 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223401 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223411 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerName="extract-content" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223420 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerName="extract-content" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223433 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64277bb9-7100-46a8-b522-b8774320c4e1" containerName="extract-content" Jun 06 09:16:24 
crc kubenswrapper[4911]: I0606 09:16:24.223443 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="64277bb9-7100-46a8-b522-b8774320c4e1" containerName="extract-content" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223453 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="573af29b-3e41-4b58-aec9-8bbfe7845920" containerName="marketplace-operator" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223460 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="573af29b-3e41-4b58-aec9-8bbfe7845920" containerName="marketplace-operator" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223470 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64277bb9-7100-46a8-b522-b8774320c4e1" containerName="extract-utilities" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223478 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="64277bb9-7100-46a8-b522-b8774320c4e1" containerName="extract-utilities" Jun 06 09:16:24 crc kubenswrapper[4911]: E0606 09:16:24.223492 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223499 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223631 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="64277bb9-7100-46a8-b522-b8774320c4e1" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223649 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c994037f-6a9e-4e7e-82ee-e390d62354e0" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223663 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0278664-45e1-4e8c-87fb-1674b538a207" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223673 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d92a6a8-bc23-4b3e-959d-75741ac051f3" containerName="registry-server" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.223683 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="573af29b-3e41-4b58-aec9-8bbfe7845920" containerName="marketplace-operator" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.224846 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.227906 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.236827 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p29wq"] Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.242085 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b15e184d-dfb3-492c-a3aa-32e514493e6f-catalog-content\") pod \"redhat-marketplace-p29wq\" (UID: \"b15e184d-dfb3-492c-a3aa-32e514493e6f\") " pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.242194 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b15e184d-dfb3-492c-a3aa-32e514493e6f-utilities\") pod \"redhat-marketplace-p29wq\" (UID: \"b15e184d-dfb3-492c-a3aa-32e514493e6f\") " pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.242257 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpq8p\" (UniqueName: \"kubernetes.io/projected/b15e184d-dfb3-492c-a3aa-32e514493e6f-kube-api-access-tpq8p\") pod \"redhat-marketplace-p29wq\" (UID: \"b15e184d-dfb3-492c-a3aa-32e514493e6f\") " pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.281337 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" event={"ID":"573af29b-3e41-4b58-aec9-8bbfe7845920","Type":"ContainerDied","Data":"cf8a3388ad1ab621d4eed7b2a88310a5aa6bf7980f0ae95eff3bb083c9605bd7"} Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.281381 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-64hwv" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.281767 4911 scope.go:117] "RemoveContainer" containerID="a5160a7dd73b764858005aa3892acaa300753976b3bf4515b7ed373e86379dfb" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.292916 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-r6jnl" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.300366 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.300493 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.343384 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-64hwv"] Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.344068 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b15e184d-dfb3-492c-a3aa-32e514493e6f-utilities\") pod \"redhat-marketplace-p29wq\" (UID: \"b15e184d-dfb3-492c-a3aa-32e514493e6f\") " pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.344161 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpq8p\" (UniqueName: \"kubernetes.io/projected/b15e184d-dfb3-492c-a3aa-32e514493e6f-kube-api-access-tpq8p\") pod \"redhat-marketplace-p29wq\" (UID: \"b15e184d-dfb3-492c-a3aa-32e514493e6f\") " pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.344192 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b15e184d-dfb3-492c-a3aa-32e514493e6f-catalog-content\") pod \"redhat-marketplace-p29wq\" (UID: \"b15e184d-dfb3-492c-a3aa-32e514493e6f\") " pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.344801 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b15e184d-dfb3-492c-a3aa-32e514493e6f-catalog-content\") pod \"redhat-marketplace-p29wq\" (UID: \"b15e184d-dfb3-492c-a3aa-32e514493e6f\") " pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.345237 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b15e184d-dfb3-492c-a3aa-32e514493e6f-utilities\") pod \"redhat-marketplace-p29wq\" (UID: \"b15e184d-dfb3-492c-a3aa-32e514493e6f\") " pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.347634 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-64hwv"] Jun 06 09:16:24 crc 
kubenswrapper[4911]: I0606 09:16:24.364774 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpq8p\" (UniqueName: \"kubernetes.io/projected/b15e184d-dfb3-492c-a3aa-32e514493e6f-kube-api-access-tpq8p\") pod \"redhat-marketplace-p29wq\" (UID: \"b15e184d-dfb3-492c-a3aa-32e514493e6f\") " pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.418424 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rj7zg"] Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.420023 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.425112 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.433736 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rj7zg"] Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.547794 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7006e423-c587-4159-aba3-aa155251dee6-catalog-content\") pod \"redhat-operators-rj7zg\" (UID: \"7006e423-c587-4159-aba3-aa155251dee6\") " pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.547858 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4zll\" (UniqueName: \"kubernetes.io/projected/7006e423-c587-4159-aba3-aa155251dee6-kube-api-access-z4zll\") pod \"redhat-operators-rj7zg\" (UID: \"7006e423-c587-4159-aba3-aa155251dee6\") " pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.547910 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7006e423-c587-4159-aba3-aa155251dee6-utilities\") pod \"redhat-operators-rj7zg\" (UID: \"7006e423-c587-4159-aba3-aa155251dee6\") " pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.628659 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.650369 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7006e423-c587-4159-aba3-aa155251dee6-catalog-content\") pod \"redhat-operators-rj7zg\" (UID: \"7006e423-c587-4159-aba3-aa155251dee6\") " pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.650447 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4zll\" (UniqueName: \"kubernetes.io/projected/7006e423-c587-4159-aba3-aa155251dee6-kube-api-access-z4zll\") pod \"redhat-operators-rj7zg\" (UID: \"7006e423-c587-4159-aba3-aa155251dee6\") " pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.650483 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7006e423-c587-4159-aba3-aa155251dee6-utilities\") pod \"redhat-operators-rj7zg\" (UID: \"7006e423-c587-4159-aba3-aa155251dee6\") " pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.651178 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7006e423-c587-4159-aba3-aa155251dee6-catalog-content\") pod \"redhat-operators-rj7zg\" (UID: \"7006e423-c587-4159-aba3-aa155251dee6\") " pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.651249 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7006e423-c587-4159-aba3-aa155251dee6-utilities\") pod \"redhat-operators-rj7zg\" (UID: \"7006e423-c587-4159-aba3-aa155251dee6\") " pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.678834 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4zll\" (UniqueName: \"kubernetes.io/projected/7006e423-c587-4159-aba3-aa155251dee6-kube-api-access-z4zll\") pod \"redhat-operators-rj7zg\" (UID: \"7006e423-c587-4159-aba3-aa155251dee6\") " pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.740793 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.835026 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p29wq"] Jun 06 09:16:24 crc kubenswrapper[4911]: I0606 09:16:24.944852 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rj7zg"] Jun 06 09:16:24 crc kubenswrapper[4911]: W0606 09:16:24.952170 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7006e423_c587_4159_aba3_aa155251dee6.slice/crio-2b1aa73371b00432022f65732c70eb2ae9d3dd644876c96999eabbe03a979604 WatchSource:0}: Error finding container 2b1aa73371b00432022f65732c70eb2ae9d3dd644876c96999eabbe03a979604: Status 404 returned error can't find the container with id 2b1aa73371b00432022f65732c70eb2ae9d3dd644876c96999eabbe03a979604 Jun 06 09:16:25 crc kubenswrapper[4911]: I0606 09:16:25.298857 4911 generic.go:334] "Generic (PLEG): container finished" podID="b15e184d-dfb3-492c-a3aa-32e514493e6f" containerID="f5c7838478bddd076b0e0a332d3fc15ccb36eda5f21affc33765d77ed75d8643" exitCode=0 Jun 06 09:16:25 crc kubenswrapper[4911]: I0606 09:16:25.298930 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p29wq" event={"ID":"b15e184d-dfb3-492c-a3aa-32e514493e6f","Type":"ContainerDied","Data":"f5c7838478bddd076b0e0a332d3fc15ccb36eda5f21affc33765d77ed75d8643"} Jun 06 09:16:25 crc kubenswrapper[4911]: I0606 09:16:25.299326 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p29wq" event={"ID":"b15e184d-dfb3-492c-a3aa-32e514493e6f","Type":"ContainerStarted","Data":"bd3824786d43447ac6aa3770f98f2c4e230d5afd8882b74f6873f94776fd047d"} Jun 06 09:16:25 crc kubenswrapper[4911]: I0606 09:16:25.305349 4911 generic.go:334] "Generic (PLEG): container finished" podID="7006e423-c587-4159-aba3-aa155251dee6" containerID="d5c2bbbed97a6707e6634d1585dac5ea5181fdfd2a82e3d341dd9aed050d2961" exitCode=0 Jun 06 09:16:25 crc kubenswrapper[4911]: I0606 09:16:25.305430 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rj7zg" event={"ID":"7006e423-c587-4159-aba3-aa155251dee6","Type":"ContainerDied","Data":"d5c2bbbed97a6707e6634d1585dac5ea5181fdfd2a82e3d341dd9aed050d2961"} Jun 06 09:16:25 crc kubenswrapper[4911]: I0606 09:16:25.305469 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rj7zg" event={"ID":"7006e423-c587-4159-aba3-aa155251dee6","Type":"ContainerStarted","Data":"2b1aa73371b00432022f65732c70eb2ae9d3dd644876c96999eabbe03a979604"} Jun 06 09:16:25 crc kubenswrapper[4911]: I0606 09:16:25.955899 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="573af29b-3e41-4b58-aec9-8bbfe7845920" path="/var/lib/kubelet/pods/573af29b-3e41-4b58-aec9-8bbfe7845920/volumes" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.618708 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rwvgf"] Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.620572 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.623162 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.628559 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rwvgf"] Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.778894 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-catalog-content\") pod \"community-operators-rwvgf\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.779036 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-utilities\") pod \"community-operators-rwvgf\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.779755 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwcht\" (UniqueName: \"kubernetes.io/projected/dfa00874-57d4-44e2-a6e0-50c389978f21-kube-api-access-qwcht\") pod \"community-operators-rwvgf\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.830119 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g6x48"] Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.833081 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.834416 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g6x48"] Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.835968 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.881132 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-utilities\") pod \"community-operators-rwvgf\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.881263 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwcht\" (UniqueName: \"kubernetes.io/projected/dfa00874-57d4-44e2-a6e0-50c389978f21-kube-api-access-qwcht\") pod \"community-operators-rwvgf\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.881430 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-catalog-content\") pod \"community-operators-rwvgf\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.881979 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-utilities\") pod \"community-operators-rwvgf\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.882123 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-catalog-content\") pod \"community-operators-rwvgf\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.905516 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwcht\" (UniqueName: \"kubernetes.io/projected/dfa00874-57d4-44e2-a6e0-50c389978f21-kube-api-access-qwcht\") pod \"community-operators-rwvgf\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.951735 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.983237 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-utilities\") pod \"certified-operators-g6x48\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.983309 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx7bz\" (UniqueName: \"kubernetes.io/projected/c0f6021c-b179-459f-9b3f-901c36c58d5f-kube-api-access-jx7bz\") pod \"certified-operators-g6x48\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:26 crc kubenswrapper[4911]: I0606 09:16:26.983963 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-catalog-content\") pod \"certified-operators-g6x48\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.085810 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-utilities\") pod \"certified-operators-g6x48\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.086231 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx7bz\" (UniqueName: \"kubernetes.io/projected/c0f6021c-b179-459f-9b3f-901c36c58d5f-kube-api-access-jx7bz\") pod \"certified-operators-g6x48\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.086317 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-catalog-content\") pod \"certified-operators-g6x48\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.086834 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-catalog-content\") pod \"certified-operators-g6x48\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.086828 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-utilities\") pod \"certified-operators-g6x48\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.107389 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx7bz\" (UniqueName: \"kubernetes.io/projected/c0f6021c-b179-459f-9b3f-901c36c58d5f-kube-api-access-jx7bz\") pod 
\"certified-operators-g6x48\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.154040 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rwvgf"] Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.157372 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:27 crc kubenswrapper[4911]: W0606 09:16:27.172511 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfa00874_57d4_44e2_a6e0_50c389978f21.slice/crio-d63bd79b3a99cb471cce708be74f7089d01400d6609f269fea62fe457974ef5b WatchSource:0}: Error finding container d63bd79b3a99cb471cce708be74f7089d01400d6609f269fea62fe457974ef5b: Status 404 returned error can't find the container with id d63bd79b3a99cb471cce708be74f7089d01400d6609f269fea62fe457974ef5b Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.326476 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rwvgf" event={"ID":"dfa00874-57d4-44e2-a6e0-50c389978f21","Type":"ContainerStarted","Data":"d63bd79b3a99cb471cce708be74f7089d01400d6609f269fea62fe457974ef5b"} Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.333805 4911 generic.go:334] "Generic (PLEG): container finished" podID="b15e184d-dfb3-492c-a3aa-32e514493e6f" containerID="3d9a4da681928c3015dadece09cb4486905dc66b56f506187377ef2ee1a566fd" exitCode=0 Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.333908 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p29wq" event={"ID":"b15e184d-dfb3-492c-a3aa-32e514493e6f","Type":"ContainerDied","Data":"3d9a4da681928c3015dadece09cb4486905dc66b56f506187377ef2ee1a566fd"} Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.339113 4911 generic.go:334] "Generic (PLEG): container finished" podID="7006e423-c587-4159-aba3-aa155251dee6" containerID="1c51f748099839d4b6c90133e8dfdf734a82fa4f8914ccd942af2f718da29a20" exitCode=0 Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.339195 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rj7zg" event={"ID":"7006e423-c587-4159-aba3-aa155251dee6","Type":"ContainerDied","Data":"1c51f748099839d4b6c90133e8dfdf734a82fa4f8914ccd942af2f718da29a20"} Jun 06 09:16:27 crc kubenswrapper[4911]: I0606 09:16:27.372510 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g6x48"] Jun 06 09:16:27 crc kubenswrapper[4911]: W0606 09:16:27.386370 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0f6021c_b179_459f_9b3f_901c36c58d5f.slice/crio-c2e1c91cb20606b4428879708f6253513b361751d789752de11aacad4eac6db1 WatchSource:0}: Error finding container c2e1c91cb20606b4428879708f6253513b361751d789752de11aacad4eac6db1: Status 404 returned error can't find the container with id c2e1c91cb20606b4428879708f6253513b361751d789752de11aacad4eac6db1 Jun 06 09:16:28 crc kubenswrapper[4911]: I0606 09:16:28.352122 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rj7zg" event={"ID":"7006e423-c587-4159-aba3-aa155251dee6","Type":"ContainerStarted","Data":"8c5b46bd14cb0030097da15cbf9f4ee9895e8efedea048748d93d53c719a2986"} Jun 06 
09:16:28 crc kubenswrapper[4911]: I0606 09:16:28.354549 4911 generic.go:334] "Generic (PLEG): container finished" podID="dfa00874-57d4-44e2-a6e0-50c389978f21" containerID="49b9f5c2f126302a844891a6bfc533d55f8ff6cf0a8bae8102ce08926c567f70" exitCode=0 Jun 06 09:16:28 crc kubenswrapper[4911]: I0606 09:16:28.354646 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rwvgf" event={"ID":"dfa00874-57d4-44e2-a6e0-50c389978f21","Type":"ContainerDied","Data":"49b9f5c2f126302a844891a6bfc533d55f8ff6cf0a8bae8102ce08926c567f70"} Jun 06 09:16:28 crc kubenswrapper[4911]: I0606 09:16:28.357331 4911 generic.go:334] "Generic (PLEG): container finished" podID="c0f6021c-b179-459f-9b3f-901c36c58d5f" containerID="4a683cc8cf6cf5277812ad5805635ea76caf7971e3d14210cba88131911df454" exitCode=0 Jun 06 09:16:28 crc kubenswrapper[4911]: I0606 09:16:28.357397 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6x48" event={"ID":"c0f6021c-b179-459f-9b3f-901c36c58d5f","Type":"ContainerDied","Data":"4a683cc8cf6cf5277812ad5805635ea76caf7971e3d14210cba88131911df454"} Jun 06 09:16:28 crc kubenswrapper[4911]: I0606 09:16:28.357458 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6x48" event={"ID":"c0f6021c-b179-459f-9b3f-901c36c58d5f","Type":"ContainerStarted","Data":"c2e1c91cb20606b4428879708f6253513b361751d789752de11aacad4eac6db1"} Jun 06 09:16:28 crc kubenswrapper[4911]: I0606 09:16:28.362350 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p29wq" event={"ID":"b15e184d-dfb3-492c-a3aa-32e514493e6f","Type":"ContainerStarted","Data":"203cdf42031ff2626d8709ec38a721e6898ebc4838876b5ba63634f50d99d41e"} Jun 06 09:16:28 crc kubenswrapper[4911]: I0606 09:16:28.378441 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rj7zg" podStartSLOduration=1.797979464 podStartE2EDuration="4.378410001s" podCreationTimestamp="2025-06-06 09:16:24 +0000 UTC" firstStartedPulling="2025-06-06 09:16:25.307785059 +0000 UTC m=+196.583210602" lastFinishedPulling="2025-06-06 09:16:27.888215596 +0000 UTC m=+199.163641139" observedRunningTime="2025-06-06 09:16:28.376185788 +0000 UTC m=+199.651611341" watchObservedRunningTime="2025-06-06 09:16:28.378410001 +0000 UTC m=+199.653835534" Jun 06 09:16:28 crc kubenswrapper[4911]: I0606 09:16:28.406800 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p29wq" podStartSLOduration=1.880949022 podStartE2EDuration="4.406772401s" podCreationTimestamp="2025-06-06 09:16:24 +0000 UTC" firstStartedPulling="2025-06-06 09:16:25.301343475 +0000 UTC m=+196.576769018" lastFinishedPulling="2025-06-06 09:16:27.827166854 +0000 UTC m=+199.102592397" observedRunningTime="2025-06-06 09:16:28.402771478 +0000 UTC m=+199.678197041" watchObservedRunningTime="2025-06-06 09:16:28.406772401 +0000 UTC m=+199.682197944" Jun 06 09:16:29 crc kubenswrapper[4911]: I0606 09:16:29.371909 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6x48" event={"ID":"c0f6021c-b179-459f-9b3f-901c36c58d5f","Type":"ContainerStarted","Data":"ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689"} Jun 06 09:16:29 crc kubenswrapper[4911]: I0606 09:16:29.376194 4911 generic.go:334] "Generic (PLEG): container finished" podID="dfa00874-57d4-44e2-a6e0-50c389978f21" 
containerID="a9d0082b9c3f915b8ee997b3c34f34d17e89694d88cc9ad3314d8e8aad598393" exitCode=0 Jun 06 09:16:29 crc kubenswrapper[4911]: I0606 09:16:29.376312 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rwvgf" event={"ID":"dfa00874-57d4-44e2-a6e0-50c389978f21","Type":"ContainerDied","Data":"a9d0082b9c3f915b8ee997b3c34f34d17e89694d88cc9ad3314d8e8aad598393"} Jun 06 09:16:30 crc kubenswrapper[4911]: I0606 09:16:30.385943 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rwvgf" event={"ID":"dfa00874-57d4-44e2-a6e0-50c389978f21","Type":"ContainerStarted","Data":"68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19"} Jun 06 09:16:30 crc kubenswrapper[4911]: I0606 09:16:30.389657 4911 generic.go:334] "Generic (PLEG): container finished" podID="c0f6021c-b179-459f-9b3f-901c36c58d5f" containerID="ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689" exitCode=0 Jun 06 09:16:30 crc kubenswrapper[4911]: I0606 09:16:30.389703 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6x48" event={"ID":"c0f6021c-b179-459f-9b3f-901c36c58d5f","Type":"ContainerDied","Data":"ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689"} Jun 06 09:16:32 crc kubenswrapper[4911]: I0606 09:16:32.404612 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6x48" event={"ID":"c0f6021c-b179-459f-9b3f-901c36c58d5f","Type":"ContainerStarted","Data":"4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838"} Jun 06 09:16:32 crc kubenswrapper[4911]: I0606 09:16:32.422512 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rwvgf" podStartSLOduration=4.960897528 podStartE2EDuration="6.422493339s" podCreationTimestamp="2025-06-06 09:16:26 +0000 UTC" firstStartedPulling="2025-06-06 09:16:28.356871444 +0000 UTC m=+199.632296977" lastFinishedPulling="2025-06-06 09:16:29.818467255 +0000 UTC m=+201.093892788" observedRunningTime="2025-06-06 09:16:30.406690256 +0000 UTC m=+201.682115809" watchObservedRunningTime="2025-06-06 09:16:32.422493339 +0000 UTC m=+203.697918882" Jun 06 09:16:32 crc kubenswrapper[4911]: I0606 09:16:32.423278 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g6x48" podStartSLOduration=4.714956911 podStartE2EDuration="6.423272431s" podCreationTimestamp="2025-06-06 09:16:26 +0000 UTC" firstStartedPulling="2025-06-06 09:16:28.36029297 +0000 UTC m=+199.635718513" lastFinishedPulling="2025-06-06 09:16:30.06860849 +0000 UTC m=+201.344034033" observedRunningTime="2025-06-06 09:16:32.421455 +0000 UTC m=+203.696880553" watchObservedRunningTime="2025-06-06 09:16:32.423272431 +0000 UTC m=+203.698697974" Jun 06 09:16:34 crc kubenswrapper[4911]: I0606 09:16:34.629229 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:34 crc kubenswrapper[4911]: I0606 09:16:34.629953 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:34 crc kubenswrapper[4911]: I0606 09:16:34.681638 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:34 crc kubenswrapper[4911]: I0606 09:16:34.741378 4911 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:34 crc kubenswrapper[4911]: I0606 09:16:34.741462 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:34 crc kubenswrapper[4911]: I0606 09:16:34.783573 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:35 crc kubenswrapper[4911]: I0606 09:16:35.464701 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rj7zg" Jun 06 09:16:35 crc kubenswrapper[4911]: I0606 09:16:35.464985 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p29wq" Jun 06 09:16:36 crc kubenswrapper[4911]: I0606 09:16:36.952568 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:36 crc kubenswrapper[4911]: I0606 09:16:36.952921 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:36 crc kubenswrapper[4911]: I0606 09:16:36.992868 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:37 crc kubenswrapper[4911]: I0606 09:16:37.158156 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:37 crc kubenswrapper[4911]: I0606 09:16:37.158231 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:37 crc kubenswrapper[4911]: I0606 09:16:37.201689 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:37 crc kubenswrapper[4911]: I0606 09:16:37.470617 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:16:37 crc kubenswrapper[4911]: I0606 09:16:37.471628 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:16:54 crc kubenswrapper[4911]: I0606 09:16:54.300809 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:16:54 crc kubenswrapper[4911]: I0606 09:16:54.301494 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:16:54 crc kubenswrapper[4911]: I0606 09:16:54.301563 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:16:54 crc kubenswrapper[4911]: I0606 09:16:54.302547 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"c837d3cdc3edeb58703b5c9ea9f4e1684698e0b2806ed5cdf89b0984d3907c7b"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 09:16:54 crc kubenswrapper[4911]: I0606 09:16:54.302685 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://c837d3cdc3edeb58703b5c9ea9f4e1684698e0b2806ed5cdf89b0984d3907c7b" gracePeriod=600 Jun 06 09:16:54 crc kubenswrapper[4911]: I0606 09:16:54.539164 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="c837d3cdc3edeb58703b5c9ea9f4e1684698e0b2806ed5cdf89b0984d3907c7b" exitCode=0 Jun 06 09:16:54 crc kubenswrapper[4911]: I0606 09:16:54.539214 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"c837d3cdc3edeb58703b5c9ea9f4e1684698e0b2806ed5cdf89b0984d3907c7b"} Jun 06 09:16:55 crc kubenswrapper[4911]: I0606 09:16:55.546851 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"24ec77e025b540903b1d2b7a8e32c883a2c9be3072b2687f98eefbbb340ac16a"} Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.195353 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xxxwr"] Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.197173 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.214379 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xxxwr"] Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.360645 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/228e576d-5367-4af2-a4ef-ed8eb0517c25-registry-certificates\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.360718 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95trh\" (UniqueName: \"kubernetes.io/projected/228e576d-5367-4af2-a4ef-ed8eb0517c25-kube-api-access-95trh\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.360816 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/228e576d-5367-4af2-a4ef-ed8eb0517c25-bound-sa-token\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.360876 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/228e576d-5367-4af2-a4ef-ed8eb0517c25-registry-tls\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.360919 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.360944 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/228e576d-5367-4af2-a4ef-ed8eb0517c25-trusted-ca\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.360970 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/228e576d-5367-4af2-a4ef-ed8eb0517c25-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.361001 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/228e576d-5367-4af2-a4ef-ed8eb0517c25-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.387708 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.462849 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/228e576d-5367-4af2-a4ef-ed8eb0517c25-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.462930 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/228e576d-5367-4af2-a4ef-ed8eb0517c25-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.462974 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95trh\" (UniqueName: \"kubernetes.io/projected/228e576d-5367-4af2-a4ef-ed8eb0517c25-kube-api-access-95trh\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.462990 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/228e576d-5367-4af2-a4ef-ed8eb0517c25-registry-certificates\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.463020 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/228e576d-5367-4af2-a4ef-ed8eb0517c25-bound-sa-token\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.463047 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/228e576d-5367-4af2-a4ef-ed8eb0517c25-registry-tls\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.463067 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/228e576d-5367-4af2-a4ef-ed8eb0517c25-trusted-ca\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.464029 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/228e576d-5367-4af2-a4ef-ed8eb0517c25-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.464962 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/228e576d-5367-4af2-a4ef-ed8eb0517c25-trusted-ca\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.465087 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/228e576d-5367-4af2-a4ef-ed8eb0517c25-registry-certificates\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.476165 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/228e576d-5367-4af2-a4ef-ed8eb0517c25-registry-tls\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.477272 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/228e576d-5367-4af2-a4ef-ed8eb0517c25-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.481025 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/228e576d-5367-4af2-a4ef-ed8eb0517c25-bound-sa-token\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.494217 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95trh\" (UniqueName: \"kubernetes.io/projected/228e576d-5367-4af2-a4ef-ed8eb0517c25-kube-api-access-95trh\") pod \"image-registry-66df7c8f76-xxxwr\" (UID: \"228e576d-5367-4af2-a4ef-ed8eb0517c25\") " pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.514151 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:30 crc kubenswrapper[4911]: I0606 09:18:30.706652 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xxxwr"] Jun 06 09:18:31 crc kubenswrapper[4911]: I0606 09:18:31.064832 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" event={"ID":"228e576d-5367-4af2-a4ef-ed8eb0517c25","Type":"ContainerStarted","Data":"fdd29bbe1f01e0b48df6b3317699ebe8575a273ee420e6bf2605dbca39ef253e"} Jun 06 09:18:31 crc kubenswrapper[4911]: I0606 09:18:31.064909 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" event={"ID":"228e576d-5367-4af2-a4ef-ed8eb0517c25","Type":"ContainerStarted","Data":"af15f43999ba5bfaa0ecdca02fafa79be1b7c004e3891b8bc079b83346cfcdc8"} Jun 06 09:18:31 crc kubenswrapper[4911]: I0606 09:18:31.064943 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:31 crc kubenswrapper[4911]: I0606 09:18:31.087680 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" podStartSLOduration=1.087657318 podStartE2EDuration="1.087657318s" podCreationTimestamp="2025-06-06 09:18:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:18:31.086315652 +0000 UTC m=+322.361741215" watchObservedRunningTime="2025-06-06 09:18:31.087657318 +0000 UTC m=+322.363082861" Jun 06 09:18:50 crc kubenswrapper[4911]: I0606 09:18:50.521060 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-xxxwr" Jun 06 09:18:50 crc kubenswrapper[4911]: I0606 09:18:50.574909 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8zkfh"] Jun 06 09:18:54 crc kubenswrapper[4911]: I0606 09:18:54.300400 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:18:54 crc kubenswrapper[4911]: I0606 09:18:54.300717 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:19:15 crc kubenswrapper[4911]: I0606 09:19:15.616953 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" podUID="bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" containerName="registry" containerID="cri-o://0c53daa96c38e48f4177edb67a7631af5a633de7803534e2e5f6d8c10f55f998" gracePeriod=30 Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.302803 4911 generic.go:334] "Generic (PLEG): container finished" podID="bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" containerID="0c53daa96c38e48f4177edb67a7631af5a633de7803534e2e5f6d8c10f55f998" exitCode=0 Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.302899 4911 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" event={"ID":"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f","Type":"ContainerDied","Data":"0c53daa96c38e48f4177edb67a7631af5a633de7803534e2e5f6d8c10f55f998"} Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.453380 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.632003 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-tls\") pod \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.632053 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-bound-sa-token\") pod \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.632129 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-ca-trust-extracted\") pod \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.632205 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-installation-pull-secrets\") pod \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.632228 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8b4p\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-kube-api-access-c8b4p\") pod \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.632254 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-certificates\") pod \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.632400 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.632426 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-trusted-ca\") pod \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\" (UID: \"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f\") " Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.633447 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.634432 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.639370 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.639424 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.639922 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.640317 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-kube-api-access-c8b4p" (OuterVolumeSpecName: "kube-api-access-c8b4p") pod "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f"). InnerVolumeSpecName "kube-api-access-c8b4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.643554 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.650034 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" (UID: "bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.733560 4911 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.733609 4911 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.733623 4911 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.733635 4911 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.733648 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8b4p\" (UniqueName: \"kubernetes.io/projected/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-kube-api-access-c8b4p\") on node \"crc\" DevicePath \"\"" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.733657 4911 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-registry-certificates\") on node \"crc\" DevicePath \"\"" Jun 06 09:19:16 crc kubenswrapper[4911]: I0606 09:19:16.733667 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f-trusted-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:19:17 crc kubenswrapper[4911]: I0606 09:19:17.310958 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" event={"ID":"bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f","Type":"ContainerDied","Data":"f5c65174ec3f00c6c2dc47e6d22679dfbc430141c6db2e26eef8b05b8e48e8ef"} Jun 06 09:19:17 crc kubenswrapper[4911]: I0606 09:19:17.311481 4911 scope.go:117] "RemoveContainer" containerID="0c53daa96c38e48f4177edb67a7631af5a633de7803534e2e5f6d8c10f55f998" Jun 06 09:19:17 crc kubenswrapper[4911]: I0606 09:19:17.311107 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8zkfh" Jun 06 09:19:17 crc kubenswrapper[4911]: I0606 09:19:17.336959 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8zkfh"] Jun 06 09:19:17 crc kubenswrapper[4911]: I0606 09:19:17.341199 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8zkfh"] Jun 06 09:19:17 crc kubenswrapper[4911]: I0606 09:19:17.955325 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" path="/var/lib/kubelet/pods/bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f/volumes" Jun 06 09:19:24 crc kubenswrapper[4911]: I0606 09:19:24.300611 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:19:24 crc kubenswrapper[4911]: I0606 09:19:24.300997 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:19:54 crc kubenswrapper[4911]: I0606 09:19:54.300920 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:19:54 crc kubenswrapper[4911]: I0606 09:19:54.301391 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:19:54 crc kubenswrapper[4911]: I0606 09:19:54.301441 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:19:54 crc kubenswrapper[4911]: I0606 09:19:54.302017 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"24ec77e025b540903b1d2b7a8e32c883a2c9be3072b2687f98eefbbb340ac16a"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 09:19:54 crc kubenswrapper[4911]: I0606 09:19:54.302063 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://24ec77e025b540903b1d2b7a8e32c883a2c9be3072b2687f98eefbbb340ac16a" gracePeriod=600 Jun 06 09:19:54 crc kubenswrapper[4911]: I0606 09:19:54.519281 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="24ec77e025b540903b1d2b7a8e32c883a2c9be3072b2687f98eefbbb340ac16a" exitCode=0 Jun 06 09:19:54 crc kubenswrapper[4911]: I0606 09:19:54.519376 4911 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"24ec77e025b540903b1d2b7a8e32c883a2c9be3072b2687f98eefbbb340ac16a"} Jun 06 09:19:54 crc kubenswrapper[4911]: I0606 09:19:54.519641 4911 scope.go:117] "RemoveContainer" containerID="c837d3cdc3edeb58703b5c9ea9f4e1684698e0b2806ed5cdf89b0984d3907c7b" Jun 06 09:19:55 crc kubenswrapper[4911]: I0606 09:19:55.527135 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"16351863e8c8c00f3cd092f5ca35626763ec65854e86cd76f5f564cdc9f7a3ca"} Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.737338 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-v4mqj"] Jun 06 09:21:41 crc kubenswrapper[4911]: E0606 09:21:41.738155 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" containerName="registry" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.738167 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" containerName="registry" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.738275 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcfb5e53-2b1a-4a2d-8fd0-0299675ef90f" containerName="registry" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.738708 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-v4mqj" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.741193 4911 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-kg694" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.741247 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.743477 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.749159 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-zqwfb"] Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.749901 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-zqwfb" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.755178 4911 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-vxkkw" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.761353 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-v4mqj"] Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.777229 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-zqwfb"] Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.780008 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8mhxf"] Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.780828 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-8mhxf" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.782555 4911 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-zwtcx" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.785858 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8mhxf"] Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.851247 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq2zk\" (UniqueName: \"kubernetes.io/projected/cdc87445-bf03-4198-83c5-ff423ab48e27-kube-api-access-bq2zk\") pod \"cert-manager-webhook-5655c58dd6-8mhxf\" (UID: \"cdc87445-bf03-4198-83c5-ff423ab48e27\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8mhxf" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.851297 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2jvn\" (UniqueName: \"kubernetes.io/projected/75f83d38-b516-4f79-b071-e57e93d6d35b-kube-api-access-r2jvn\") pod \"cert-manager-5b446d88c5-zqwfb\" (UID: \"75f83d38-b516-4f79-b071-e57e93d6d35b\") " pod="cert-manager/cert-manager-5b446d88c5-zqwfb" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.851438 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2rnm\" (UniqueName: \"kubernetes.io/projected/17fadf3c-2d8d-4b8d-93df-8fe99670ffde-kube-api-access-g2rnm\") pod \"cert-manager-cainjector-7f985d654d-v4mqj\" (UID: \"17fadf3c-2d8d-4b8d-93df-8fe99670ffde\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-v4mqj" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.952364 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq2zk\" (UniqueName: \"kubernetes.io/projected/cdc87445-bf03-4198-83c5-ff423ab48e27-kube-api-access-bq2zk\") pod \"cert-manager-webhook-5655c58dd6-8mhxf\" (UID: \"cdc87445-bf03-4198-83c5-ff423ab48e27\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8mhxf" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.952411 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2jvn\" (UniqueName: \"kubernetes.io/projected/75f83d38-b516-4f79-b071-e57e93d6d35b-kube-api-access-r2jvn\") pod \"cert-manager-5b446d88c5-zqwfb\" (UID: \"75f83d38-b516-4f79-b071-e57e93d6d35b\") " pod="cert-manager/cert-manager-5b446d88c5-zqwfb" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.952462 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2rnm\" (UniqueName: \"kubernetes.io/projected/17fadf3c-2d8d-4b8d-93df-8fe99670ffde-kube-api-access-g2rnm\") pod \"cert-manager-cainjector-7f985d654d-v4mqj\" (UID: \"17fadf3c-2d8d-4b8d-93df-8fe99670ffde\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-v4mqj" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.970970 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2jvn\" (UniqueName: \"kubernetes.io/projected/75f83d38-b516-4f79-b071-e57e93d6d35b-kube-api-access-r2jvn\") pod \"cert-manager-5b446d88c5-zqwfb\" (UID: \"75f83d38-b516-4f79-b071-e57e93d6d35b\") " pod="cert-manager/cert-manager-5b446d88c5-zqwfb" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.972369 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-g2rnm\" (UniqueName: \"kubernetes.io/projected/17fadf3c-2d8d-4b8d-93df-8fe99670ffde-kube-api-access-g2rnm\") pod \"cert-manager-cainjector-7f985d654d-v4mqj\" (UID: \"17fadf3c-2d8d-4b8d-93df-8fe99670ffde\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-v4mqj" Jun 06 09:21:41 crc kubenswrapper[4911]: I0606 09:21:41.973622 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq2zk\" (UniqueName: \"kubernetes.io/projected/cdc87445-bf03-4198-83c5-ff423ab48e27-kube-api-access-bq2zk\") pod \"cert-manager-webhook-5655c58dd6-8mhxf\" (UID: \"cdc87445-bf03-4198-83c5-ff423ab48e27\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8mhxf" Jun 06 09:21:42 crc kubenswrapper[4911]: I0606 09:21:42.058195 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-v4mqj" Jun 06 09:21:42 crc kubenswrapper[4911]: I0606 09:21:42.069327 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-zqwfb" Jun 06 09:21:42 crc kubenswrapper[4911]: I0606 09:21:42.107582 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-8mhxf" Jun 06 09:21:42 crc kubenswrapper[4911]: I0606 09:21:42.354018 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8mhxf"] Jun 06 09:21:42 crc kubenswrapper[4911]: I0606 09:21:42.359419 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 09:21:42 crc kubenswrapper[4911]: I0606 09:21:42.497999 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-v4mqj"] Jun 06 09:21:42 crc kubenswrapper[4911]: I0606 09:21:42.501058 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-zqwfb"] Jun 06 09:21:42 crc kubenswrapper[4911]: W0606 09:21:42.501338 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17fadf3c_2d8d_4b8d_93df_8fe99670ffde.slice/crio-928949e01e9fedeaa0136644fdbf07e15253bfe407f37692cf36ea148d021aa4 WatchSource:0}: Error finding container 928949e01e9fedeaa0136644fdbf07e15253bfe407f37692cf36ea148d021aa4: Status 404 returned error can't find the container with id 928949e01e9fedeaa0136644fdbf07e15253bfe407f37692cf36ea148d021aa4 Jun 06 09:21:42 crc kubenswrapper[4911]: W0606 09:21:42.504805 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75f83d38_b516_4f79_b071_e57e93d6d35b.slice/crio-409bd1b8f41df9a00ebff49f9fdc486dad4897d6ea9b0fe6b417d0174f79faf9 WatchSource:0}: Error finding container 409bd1b8f41df9a00ebff49f9fdc486dad4897d6ea9b0fe6b417d0174f79faf9: Status 404 returned error can't find the container with id 409bd1b8f41df9a00ebff49f9fdc486dad4897d6ea9b0fe6b417d0174f79faf9 Jun 06 09:21:43 crc kubenswrapper[4911]: I0606 09:21:43.092896 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-zqwfb" event={"ID":"75f83d38-b516-4f79-b071-e57e93d6d35b","Type":"ContainerStarted","Data":"409bd1b8f41df9a00ebff49f9fdc486dad4897d6ea9b0fe6b417d0174f79faf9"} Jun 06 09:21:43 crc kubenswrapper[4911]: I0606 09:21:43.093914 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-8mhxf" 
event={"ID":"cdc87445-bf03-4198-83c5-ff423ab48e27","Type":"ContainerStarted","Data":"ea6b745e3d2f1afcfc9d4e56bff919d26f9fc73ca00640cb5e3c3075118e3a1f"} Jun 06 09:21:43 crc kubenswrapper[4911]: I0606 09:21:43.095019 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-v4mqj" event={"ID":"17fadf3c-2d8d-4b8d-93df-8fe99670ffde","Type":"ContainerStarted","Data":"928949e01e9fedeaa0136644fdbf07e15253bfe407f37692cf36ea148d021aa4"} Jun 06 09:21:45 crc kubenswrapper[4911]: I0606 09:21:45.110125 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-8mhxf" event={"ID":"cdc87445-bf03-4198-83c5-ff423ab48e27","Type":"ContainerStarted","Data":"fbe1f26c6d347fdfc80bd44a1f4c3d20eb1333c247a449c98f483b2caf5d03cf"} Jun 06 09:21:45 crc kubenswrapper[4911]: I0606 09:21:45.112425 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-v4mqj" event={"ID":"17fadf3c-2d8d-4b8d-93df-8fe99670ffde","Type":"ContainerStarted","Data":"035dfa7b522a2ee004ab9c5437a13b074c974daec012676ff2077239e16e56ca"} Jun 06 09:21:45 crc kubenswrapper[4911]: I0606 09:21:45.127871 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-8mhxf" podStartSLOduration=1.721751418 podStartE2EDuration="4.127851137s" podCreationTimestamp="2025-06-06 09:21:41 +0000 UTC" firstStartedPulling="2025-06-06 09:21:42.359167977 +0000 UTC m=+513.634593520" lastFinishedPulling="2025-06-06 09:21:44.765267696 +0000 UTC m=+516.040693239" observedRunningTime="2025-06-06 09:21:45.125589378 +0000 UTC m=+516.401014951" watchObservedRunningTime="2025-06-06 09:21:45.127851137 +0000 UTC m=+516.403276680" Jun 06 09:21:45 crc kubenswrapper[4911]: I0606 09:21:45.144909 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-v4mqj" podStartSLOduration=1.887608632 podStartE2EDuration="4.144884239s" podCreationTimestamp="2025-06-06 09:21:41 +0000 UTC" firstStartedPulling="2025-06-06 09:21:42.50340008 +0000 UTC m=+513.778825623" lastFinishedPulling="2025-06-06 09:21:44.760675677 +0000 UTC m=+516.036101230" observedRunningTime="2025-06-06 09:21:45.14184809 +0000 UTC m=+516.417273633" watchObservedRunningTime="2025-06-06 09:21:45.144884239 +0000 UTC m=+516.420309782" Jun 06 09:21:46 crc kubenswrapper[4911]: I0606 09:21:46.119016 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-zqwfb" event={"ID":"75f83d38-b516-4f79-b071-e57e93d6d35b","Type":"ContainerStarted","Data":"9d8c3c584b41078376f46dc59a6c7473e5bcf7cb5d9343da4b8aa120eb2ab3d5"} Jun 06 09:21:46 crc kubenswrapper[4911]: I0606 09:21:46.119350 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-8mhxf" Jun 06 09:21:46 crc kubenswrapper[4911]: I0606 09:21:46.139943 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-zqwfb" podStartSLOduration=1.814117355 podStartE2EDuration="5.139912735s" podCreationTimestamp="2025-06-06 09:21:41 +0000 UTC" firstStartedPulling="2025-06-06 09:21:42.506811219 +0000 UTC m=+513.782236752" lastFinishedPulling="2025-06-06 09:21:45.832606589 +0000 UTC m=+517.108032132" observedRunningTime="2025-06-06 09:21:46.134516835 +0000 UTC m=+517.409942408" watchObservedRunningTime="2025-06-06 09:21:46.139912735 +0000 UTC m=+517.415338278" Jun 06 
09:21:52 crc kubenswrapper[4911]: I0606 09:21:52.111403 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-8mhxf" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.303496 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gj94b"] Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.304188 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="nbdb" containerID="cri-o://ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4" gracePeriod=30 Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.304329 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="kube-rbac-proxy-node" containerID="cri-o://75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6" gracePeriod=30 Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.304389 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="sbdb" containerID="cri-o://688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f" gracePeriod=30 Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.304382 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="northd" containerID="cri-o://65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0" gracePeriod=30 Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.304183 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovn-controller" containerID="cri-o://50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd" gracePeriod=30 Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.304454 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101" gracePeriod=30 Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.304423 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovn-acl-logging" containerID="cri-o://6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a" gracePeriod=30 Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.343255 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovnkube-controller" containerID="cri-o://0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e" gracePeriod=30 Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.643426 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gj94b_d355bc6b-ce57-4cb3-bf7b-b1339f64ae92/ovn-acl-logging/0.log" Jun 06 09:21:53 crc 
kubenswrapper[4911]: I0606 09:21:53.644017 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gj94b_d355bc6b-ce57-4cb3-bf7b-b1339f64ae92/ovn-controller/0.log" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.644910 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699341 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ckqpk"] Jun 06 09:21:53 crc kubenswrapper[4911]: E0606 09:21:53.699582 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="kube-rbac-proxy-node" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699599 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="kube-rbac-proxy-node" Jun 06 09:21:53 crc kubenswrapper[4911]: E0606 09:21:53.699613 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovnkube-controller" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699620 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovnkube-controller" Jun 06 09:21:53 crc kubenswrapper[4911]: E0606 09:21:53.699631 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="kube-rbac-proxy-ovn-metrics" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699640 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="kube-rbac-proxy-ovn-metrics" Jun 06 09:21:53 crc kubenswrapper[4911]: E0606 09:21:53.699650 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovn-acl-logging" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699658 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovn-acl-logging" Jun 06 09:21:53 crc kubenswrapper[4911]: E0606 09:21:53.699669 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="nbdb" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699676 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="nbdb" Jun 06 09:21:53 crc kubenswrapper[4911]: E0606 09:21:53.699687 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovn-controller" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699694 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovn-controller" Jun 06 09:21:53 crc kubenswrapper[4911]: E0606 09:21:53.699708 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="northd" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699718 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="northd" Jun 06 09:21:53 crc kubenswrapper[4911]: E0606 09:21:53.699730 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="sbdb" Jun 06 
09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699738 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="sbdb" Jun 06 09:21:53 crc kubenswrapper[4911]: E0606 09:21:53.699755 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="kubecfg-setup" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699763 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="kubecfg-setup" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699903 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovnkube-controller" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699921 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="kube-rbac-proxy-ovn-metrics" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699930 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="sbdb" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699943 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="nbdb" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699953 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovn-acl-logging" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699963 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="northd" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699972 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="kube-rbac-proxy-node" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.699983 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerName="ovn-controller" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.701723 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716233 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-bin\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716280 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-log-socket\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716352 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-netd\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716378 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716445 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-log-socket" (OuterVolumeSpecName: "log-socket") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716503 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovn-node-metrics-cert\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716533 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716549 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-script-lib\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716565 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-node-log\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716585 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-ovn\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716739 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-node-log" (OuterVolumeSpecName: "node-log") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.716794 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717017 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717070 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-kubelet\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717112 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-netns\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717135 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-env-overrides\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717155 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-slash\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717182 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-var-lib-cni-networks-ovn-kubernetes\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717266 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717301 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-slash" (OuterVolumeSpecName: "host-slash") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717317 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717379 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717622 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717706 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-systemd-units\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717772 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-systemd\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717797 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-openvswitch\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717822 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-etc-openvswitch\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717858 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-config\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717865 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717886 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-ovn-kubernetes\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717902 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717916 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bhlq\" (UniqueName: \"kubernetes.io/projected/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-kube-api-access-9bhlq\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717943 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717948 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-var-lib-openvswitch\") pod \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\" (UID: \"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92\") " Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.717980 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718391 4911 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-ovn\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718414 4911 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-kubelet\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718428 4911 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-slash\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718439 4911 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-netns\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718451 4911 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-env-overrides\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718452 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718463 4911 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718533 4911 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718555 4911 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718570 4911 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718586 4911 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718602 4911 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 
09:21:53.718615 4911 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-log-socket\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718640 4911 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718654 4911 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718666 4911 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-node-log\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.718702 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.723817 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-kube-api-access-9bhlq" (OuterVolumeSpecName: "kube-api-access-9bhlq") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "kube-api-access-9bhlq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.724674 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.733781 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" (UID: "d355bc6b-ce57-4cb3-bf7b-b1339f64ae92"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.821353 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f64xh\" (UniqueName: \"kubernetes.io/projected/8d42b02b-491a-4731-93ff-67f8f97eed24-kube-api-access-f64xh\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.821972 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-cni-netd\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822049 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-var-lib-openvswitch\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822144 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-run-systemd\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822165 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8d42b02b-491a-4731-93ff-67f8f97eed24-ovn-node-metrics-cert\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822218 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-log-socket\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822242 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-etc-openvswitch\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822282 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8d42b02b-491a-4731-93ff-67f8f97eed24-env-overrides\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822309 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/8d42b02b-491a-4731-93ff-67f8f97eed24-ovnkube-config\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822387 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-run-openvswitch\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822434 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-cni-bin\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822470 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-run-ovn\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822517 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-run-netns\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822698 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8d42b02b-491a-4731-93ff-67f8f97eed24-ovnkube-script-lib\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822824 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-kubelet\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822855 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-node-log\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822895 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-run-ovn-kubernetes\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.822944 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-systemd-units\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.823065 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-slash\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.823219 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.823381 4911 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.823410 4911 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-systemd-units\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.823426 4911 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-run-systemd\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.823441 4911 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.823453 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bhlq\" (UniqueName: \"kubernetes.io/projected/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92-kube-api-access-9bhlq\") on node \"crc\" DevicePath \"\"" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924368 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8d42b02b-491a-4731-93ff-67f8f97eed24-ovnkube-config\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924436 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-run-openvswitch\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924455 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-cni-bin\") pod 
\"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924483 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-run-ovn\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924502 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-run-netns\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924526 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8d42b02b-491a-4731-93ff-67f8f97eed24-ovnkube-script-lib\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924544 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-node-log\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924561 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-kubelet\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924577 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-run-ovn-kubernetes\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924594 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-systemd-units\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924618 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-slash\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924642 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ckqpk\" (UID: 
\"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924666 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f64xh\" (UniqueName: \"kubernetes.io/projected/8d42b02b-491a-4731-93ff-67f8f97eed24-kube-api-access-f64xh\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924664 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-cni-bin\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924701 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-kubelet\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924683 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-cni-netd\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924664 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-run-openvswitch\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924712 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-slash\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924726 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924803 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-run-ovn\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924797 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-run-ovn-kubernetes\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 
09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924800 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-node-log\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924866 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-systemd-units\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924871 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-run-netns\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924709 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-host-cni-netd\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.924963 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-var-lib-openvswitch\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.925000 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-var-lib-openvswitch\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.925060 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-run-systemd\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.925113 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8d42b02b-491a-4731-93ff-67f8f97eed24-ovn-node-metrics-cert\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.925143 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-log-socket\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.925169 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-etc-openvswitch\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.925117 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-run-systemd\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.925191 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8d42b02b-491a-4731-93ff-67f8f97eed24-env-overrides\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.925223 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-log-socket\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.925236 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8d42b02b-491a-4731-93ff-67f8f97eed24-etc-openvswitch\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.925725 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8d42b02b-491a-4731-93ff-67f8f97eed24-ovnkube-config\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.925767 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8d42b02b-491a-4731-93ff-67f8f97eed24-env-overrides\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.926007 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8d42b02b-491a-4731-93ff-67f8f97eed24-ovnkube-script-lib\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.929039 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8d42b02b-491a-4731-93ff-67f8f97eed24-ovn-node-metrics-cert\") pod \"ovnkube-node-ckqpk\" (UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:53 crc kubenswrapper[4911]: I0606 09:21:53.943721 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f64xh\" (UniqueName: \"kubernetes.io/projected/8d42b02b-491a-4731-93ff-67f8f97eed24-kube-api-access-f64xh\") pod \"ovnkube-node-ckqpk\" 
(UID: \"8d42b02b-491a-4731-93ff-67f8f97eed24\") " pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.024915 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.173619 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fdfc2_962d1f6e-6277-4a04-ad9d-199d3f9f7e72/kube-multus/0.log" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.173900 4911 generic.go:334] "Generic (PLEG): container finished" podID="962d1f6e-6277-4a04-ad9d-199d3f9f7e72" containerID="fdb97be513aa3c68ba9fa14bf88ecddccdedefce0304f429656609ae37872dff" exitCode=2 Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.174013 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-fdfc2" event={"ID":"962d1f6e-6277-4a04-ad9d-199d3f9f7e72","Type":"ContainerDied","Data":"fdb97be513aa3c68ba9fa14bf88ecddccdedefce0304f429656609ae37872dff"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.174588 4911 scope.go:117] "RemoveContainer" containerID="fdb97be513aa3c68ba9fa14bf88ecddccdedefce0304f429656609ae37872dff" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.178661 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gj94b_d355bc6b-ce57-4cb3-bf7b-b1339f64ae92/ovn-acl-logging/0.log" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.179329 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gj94b_d355bc6b-ce57-4cb3-bf7b-b1339f64ae92/ovn-controller/0.log" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.179750 4911 generic.go:334] "Generic (PLEG): container finished" podID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerID="0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e" exitCode=0 Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.179828 4911 generic.go:334] "Generic (PLEG): container finished" podID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerID="688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f" exitCode=0 Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.179853 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerDied","Data":"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.179928 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerDied","Data":"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.179947 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerDied","Data":"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.179969 4911 scope.go:117] "RemoveContainer" containerID="0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.179834 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.179894 4911 generic.go:334] "Generic (PLEG): container finished" podID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerID="ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4" exitCode=0 Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.180165 4911 generic.go:334] "Generic (PLEG): container finished" podID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerID="65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0" exitCode=0 Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.180241 4911 generic.go:334] "Generic (PLEG): container finished" podID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerID="627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101" exitCode=0 Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.180307 4911 generic.go:334] "Generic (PLEG): container finished" podID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerID="75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6" exitCode=0 Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.180357 4911 generic.go:334] "Generic (PLEG): container finished" podID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerID="6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a" exitCode=143 Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.180409 4911 generic.go:334] "Generic (PLEG): container finished" podID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" containerID="50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd" exitCode=143 Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.180247 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerDied","Data":"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.180587 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerDied","Data":"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.180663 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerDied","Data":"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181211 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181278 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181466 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181535 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" 
event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerDied","Data":"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181606 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181667 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181719 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181765 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181816 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181864 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181912 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.181955 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182000 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182055 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerDied","Data":"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182139 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182191 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182243 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182309 4911 pod_container_deletor.go:114] 
"Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182402 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182469 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182531 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182606 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182692 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182769 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gj94b" event={"ID":"d355bc6b-ce57-4cb3-bf7b-b1339f64ae92","Type":"ContainerDied","Data":"418c72e49ceb1a465238316e949276dcafcfc97f1ef8a9dd520410afbf57c879"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182845 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182920 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.182997 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.183063 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.183158 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.183233 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.183300 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.183354 4911 pod_container_deletor.go:114] 
"Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.183399 4911 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.183450 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" event={"ID":"8d42b02b-491a-4731-93ff-67f8f97eed24","Type":"ContainerStarted","Data":"2da8ceca2d9c391e6e2f0b3d74372b0ac4e99ddcbdc2d66d4a4a305a7b1814cd"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.183515 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" event={"ID":"8d42b02b-491a-4731-93ff-67f8f97eed24","Type":"ContainerStarted","Data":"64bd7a6d21101af337cfc3745c499c35a9b13d584896fdbbd03b3f316ef24efe"} Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.201969 4911 scope.go:117] "RemoveContainer" containerID="688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.223997 4911 scope.go:117] "RemoveContainer" containerID="ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.240005 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gj94b"] Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.245235 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gj94b"] Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.280385 4911 scope.go:117] "RemoveContainer" containerID="65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.300849 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.300934 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.304968 4911 scope.go:117] "RemoveContainer" containerID="627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.348546 4911 scope.go:117] "RemoveContainer" containerID="75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.363958 4911 scope.go:117] "RemoveContainer" containerID="6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.378005 4911 scope.go:117] "RemoveContainer" containerID="50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.394022 4911 scope.go:117] "RemoveContainer" 
containerID="4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.412420 4911 scope.go:117] "RemoveContainer" containerID="0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e" Jun 06 09:21:54 crc kubenswrapper[4911]: E0606 09:21:54.413993 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e\": container with ID starting with 0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e not found: ID does not exist" containerID="0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.414058 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e"} err="failed to get container status \"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e\": rpc error: code = NotFound desc = could not find container \"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e\": container with ID starting with 0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.414087 4911 scope.go:117] "RemoveContainer" containerID="688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f" Jun 06 09:21:54 crc kubenswrapper[4911]: E0606 09:21:54.414583 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f\": container with ID starting with 688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f not found: ID does not exist" containerID="688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.414640 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f"} err="failed to get container status \"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f\": rpc error: code = NotFound desc = could not find container \"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f\": container with ID starting with 688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.414673 4911 scope.go:117] "RemoveContainer" containerID="ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4" Jun 06 09:21:54 crc kubenswrapper[4911]: E0606 09:21:54.415468 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4\": container with ID starting with ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4 not found: ID does not exist" containerID="ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.415522 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4"} err="failed to get container status \"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4\": rpc error: code = 
NotFound desc = could not find container \"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4\": container with ID starting with ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.415553 4911 scope.go:117] "RemoveContainer" containerID="65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0" Jun 06 09:21:54 crc kubenswrapper[4911]: E0606 09:21:54.416002 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0\": container with ID starting with 65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0 not found: ID does not exist" containerID="65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.416034 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0"} err="failed to get container status \"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0\": rpc error: code = NotFound desc = could not find container \"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0\": container with ID starting with 65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.416053 4911 scope.go:117] "RemoveContainer" containerID="627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101" Jun 06 09:21:54 crc kubenswrapper[4911]: E0606 09:21:54.416425 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101\": container with ID starting with 627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101 not found: ID does not exist" containerID="627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.416454 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101"} err="failed to get container status \"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101\": rpc error: code = NotFound desc = could not find container \"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101\": container with ID starting with 627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.416472 4911 scope.go:117] "RemoveContainer" containerID="75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6" Jun 06 09:21:54 crc kubenswrapper[4911]: E0606 09:21:54.416780 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6\": container with ID starting with 75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6 not found: ID does not exist" containerID="75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.416807 4911 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6"} err="failed to get container status \"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6\": rpc error: code = NotFound desc = could not find container \"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6\": container with ID starting with 75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.416823 4911 scope.go:117] "RemoveContainer" containerID="6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a" Jun 06 09:21:54 crc kubenswrapper[4911]: E0606 09:21:54.417110 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a\": container with ID starting with 6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a not found: ID does not exist" containerID="6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.417136 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a"} err="failed to get container status \"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a\": rpc error: code = NotFound desc = could not find container \"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a\": container with ID starting with 6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.417153 4911 scope.go:117] "RemoveContainer" containerID="50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd" Jun 06 09:21:54 crc kubenswrapper[4911]: E0606 09:21:54.417588 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd\": container with ID starting with 50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd not found: ID does not exist" containerID="50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.417614 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd"} err="failed to get container status \"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd\": rpc error: code = NotFound desc = could not find container \"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd\": container with ID starting with 50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.417634 4911 scope.go:117] "RemoveContainer" containerID="4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885" Jun 06 09:21:54 crc kubenswrapper[4911]: E0606 09:21:54.418043 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885\": container with ID starting with 4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885 not found: ID does not exist" 
containerID="4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.418108 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885"} err="failed to get container status \"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885\": rpc error: code = NotFound desc = could not find container \"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885\": container with ID starting with 4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.418126 4911 scope.go:117] "RemoveContainer" containerID="0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.419918 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e"} err="failed to get container status \"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e\": rpc error: code = NotFound desc = could not find container \"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e\": container with ID starting with 0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.419950 4911 scope.go:117] "RemoveContainer" containerID="688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.420370 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f"} err="failed to get container status \"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f\": rpc error: code = NotFound desc = could not find container \"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f\": container with ID starting with 688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.420420 4911 scope.go:117] "RemoveContainer" containerID="ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.420799 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4"} err="failed to get container status \"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4\": rpc error: code = NotFound desc = could not find container \"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4\": container with ID starting with ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.420851 4911 scope.go:117] "RemoveContainer" containerID="65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.421262 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0"} err="failed to get container status \"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0\": rpc error: code = NotFound desc = could not find 
container \"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0\": container with ID starting with 65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.421302 4911 scope.go:117] "RemoveContainer" containerID="627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.421674 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101"} err="failed to get container status \"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101\": rpc error: code = NotFound desc = could not find container \"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101\": container with ID starting with 627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.421693 4911 scope.go:117] "RemoveContainer" containerID="75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.422129 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6"} err="failed to get container status \"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6\": rpc error: code = NotFound desc = could not find container \"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6\": container with ID starting with 75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.422148 4911 scope.go:117] "RemoveContainer" containerID="6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.422408 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a"} err="failed to get container status \"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a\": rpc error: code = NotFound desc = could not find container \"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a\": container with ID starting with 6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.422441 4911 scope.go:117] "RemoveContainer" containerID="50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.422694 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd"} err="failed to get container status \"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd\": rpc error: code = NotFound desc = could not find container \"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd\": container with ID starting with 50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.422712 4911 scope.go:117] "RemoveContainer" containerID="4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.422920 4911 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885"} err="failed to get container status \"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885\": rpc error: code = NotFound desc = could not find container \"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885\": container with ID starting with 4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.422940 4911 scope.go:117] "RemoveContainer" containerID="0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.423522 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e"} err="failed to get container status \"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e\": rpc error: code = NotFound desc = could not find container \"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e\": container with ID starting with 0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.423540 4911 scope.go:117] "RemoveContainer" containerID="688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.424042 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f"} err="failed to get container status \"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f\": rpc error: code = NotFound desc = could not find container \"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f\": container with ID starting with 688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.424059 4911 scope.go:117] "RemoveContainer" containerID="ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.424418 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4"} err="failed to get container status \"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4\": rpc error: code = NotFound desc = could not find container \"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4\": container with ID starting with ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.424438 4911 scope.go:117] "RemoveContainer" containerID="65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.425588 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0"} err="failed to get container status \"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0\": rpc error: code = NotFound desc = could not find container \"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0\": container with ID starting with 
65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.425674 4911 scope.go:117] "RemoveContainer" containerID="627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.426233 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101"} err="failed to get container status \"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101\": rpc error: code = NotFound desc = could not find container \"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101\": container with ID starting with 627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.426256 4911 scope.go:117] "RemoveContainer" containerID="75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.427502 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6"} err="failed to get container status \"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6\": rpc error: code = NotFound desc = could not find container \"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6\": container with ID starting with 75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.427539 4911 scope.go:117] "RemoveContainer" containerID="6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.428125 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a"} err="failed to get container status \"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a\": rpc error: code = NotFound desc = could not find container \"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a\": container with ID starting with 6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.428198 4911 scope.go:117] "RemoveContainer" containerID="50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.428923 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd"} err="failed to get container status \"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd\": rpc error: code = NotFound desc = could not find container \"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd\": container with ID starting with 50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.428958 4911 scope.go:117] "RemoveContainer" containerID="4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.429499 4911 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885"} err="failed to get container status \"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885\": rpc error: code = NotFound desc = could not find container \"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885\": container with ID starting with 4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.429538 4911 scope.go:117] "RemoveContainer" containerID="0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.430137 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e"} err="failed to get container status \"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e\": rpc error: code = NotFound desc = could not find container \"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e\": container with ID starting with 0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.430171 4911 scope.go:117] "RemoveContainer" containerID="688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.430752 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f"} err="failed to get container status \"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f\": rpc error: code = NotFound desc = could not find container \"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f\": container with ID starting with 688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.430774 4911 scope.go:117] "RemoveContainer" containerID="ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.431115 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4"} err="failed to get container status \"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4\": rpc error: code = NotFound desc = could not find container \"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4\": container with ID starting with ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.431168 4911 scope.go:117] "RemoveContainer" containerID="65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.431424 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0"} err="failed to get container status \"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0\": rpc error: code = NotFound desc = could not find container \"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0\": container with ID starting with 65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0 not found: ID does not exist" Jun 
06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.431445 4911 scope.go:117] "RemoveContainer" containerID="627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.431757 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101"} err="failed to get container status \"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101\": rpc error: code = NotFound desc = could not find container \"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101\": container with ID starting with 627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.431777 4911 scope.go:117] "RemoveContainer" containerID="75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.432128 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6"} err="failed to get container status \"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6\": rpc error: code = NotFound desc = could not find container \"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6\": container with ID starting with 75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.432157 4911 scope.go:117] "RemoveContainer" containerID="6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.432421 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a"} err="failed to get container status \"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a\": rpc error: code = NotFound desc = could not find container \"6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a\": container with ID starting with 6e088010adc677e1898769db2867b1234fc5da7c2171e22efad86174d63c143a not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.432437 4911 scope.go:117] "RemoveContainer" containerID="50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.432773 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd"} err="failed to get container status \"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd\": rpc error: code = NotFound desc = could not find container \"50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd\": container with ID starting with 50073c7a43278fe58ab5dbd76226d07445efd5fd1c8ef19566764eabede1fcdd not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.432813 4911 scope.go:117] "RemoveContainer" containerID="4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.433282 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885"} err="failed to get container status 
\"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885\": rpc error: code = NotFound desc = could not find container \"4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885\": container with ID starting with 4574ad6d280299f3a3e8743eb6504668de5867bed437001e879a3751e4112885 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.433310 4911 scope.go:117] "RemoveContainer" containerID="0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.433662 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e"} err="failed to get container status \"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e\": rpc error: code = NotFound desc = could not find container \"0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e\": container with ID starting with 0332b4084223774b86a05d0666f1f4ad9c462c8178a7e3d32cb3bcabfd60ad2e not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.433683 4911 scope.go:117] "RemoveContainer" containerID="688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.433973 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f"} err="failed to get container status \"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f\": rpc error: code = NotFound desc = could not find container \"688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f\": container with ID starting with 688af0f498608dc19caae26c4b17480469588e1e896490af61562b233b85dd7f not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.433990 4911 scope.go:117] "RemoveContainer" containerID="ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.434315 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4"} err="failed to get container status \"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4\": rpc error: code = NotFound desc = could not find container \"ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4\": container with ID starting with ff7bbf04c2a4e4049ffbc45004471b9cd7523d478fee770cb3b58e70730a99f4 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.434402 4911 scope.go:117] "RemoveContainer" containerID="65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.434743 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0"} err="failed to get container status \"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0\": rpc error: code = NotFound desc = could not find container \"65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0\": container with ID starting with 65deb689d0db77f67b98381834ba4595bb9e7a86d503fb4a82958a1a208d3fe0 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.434778 4911 scope.go:117] "RemoveContainer" 
containerID="627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.435207 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101"} err="failed to get container status \"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101\": rpc error: code = NotFound desc = could not find container \"627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101\": container with ID starting with 627c5bd24b4d89a2786e681860f79114c9f1dc3fe029779e27cf4bf07be28101 not found: ID does not exist" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.435250 4911 scope.go:117] "RemoveContainer" containerID="75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6" Jun 06 09:21:54 crc kubenswrapper[4911]: I0606 09:21:54.435644 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6"} err="failed to get container status \"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6\": rpc error: code = NotFound desc = could not find container \"75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6\": container with ID starting with 75b8f271a819806e33ffe9a87bd9ecfc59ad682327c05ae04289a55a383fc7f6 not found: ID does not exist" Jun 06 09:21:55 crc kubenswrapper[4911]: I0606 09:21:55.190463 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-fdfc2_962d1f6e-6277-4a04-ad9d-199d3f9f7e72/kube-multus/0.log" Jun 06 09:21:55 crc kubenswrapper[4911]: I0606 09:21:55.190563 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-fdfc2" event={"ID":"962d1f6e-6277-4a04-ad9d-199d3f9f7e72","Type":"ContainerStarted","Data":"1dc513acf40d234aff788fa7f3814e51b05ce10a4525cea5056d8b94f2811047"} Jun 06 09:21:55 crc kubenswrapper[4911]: I0606 09:21:55.195238 4911 generic.go:334] "Generic (PLEG): container finished" podID="8d42b02b-491a-4731-93ff-67f8f97eed24" containerID="2da8ceca2d9c391e6e2f0b3d74372b0ac4e99ddcbdc2d66d4a4a305a7b1814cd" exitCode=0 Jun 06 09:21:55 crc kubenswrapper[4911]: I0606 09:21:55.195310 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" event={"ID":"8d42b02b-491a-4731-93ff-67f8f97eed24","Type":"ContainerDied","Data":"2da8ceca2d9c391e6e2f0b3d74372b0ac4e99ddcbdc2d66d4a4a305a7b1814cd"} Jun 06 09:21:55 crc kubenswrapper[4911]: I0606 09:21:55.954935 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d355bc6b-ce57-4cb3-bf7b-b1339f64ae92" path="/var/lib/kubelet/pods/d355bc6b-ce57-4cb3-bf7b-b1339f64ae92/volumes" Jun 06 09:21:56 crc kubenswrapper[4911]: I0606 09:21:56.203378 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" event={"ID":"8d42b02b-491a-4731-93ff-67f8f97eed24","Type":"ContainerStarted","Data":"a3ded9d798ee19ef7c760c79cf3d2f38fd5a960c49084c46f2767f5b70b0212c"} Jun 06 09:21:56 crc kubenswrapper[4911]: I0606 09:21:56.203437 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" event={"ID":"8d42b02b-491a-4731-93ff-67f8f97eed24","Type":"ContainerStarted","Data":"3c53ecf699fed01295a4aca67ade8358cf09167d112e61ae5364539329e3aa30"} Jun 06 09:21:56 crc kubenswrapper[4911]: I0606 09:21:56.203448 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" event={"ID":"8d42b02b-491a-4731-93ff-67f8f97eed24","Type":"ContainerStarted","Data":"f3b2174204106e28985223e3630e1863d16b5d1cf7ce800921064db57e337ac1"} Jun 06 09:21:56 crc kubenswrapper[4911]: I0606 09:21:56.203474 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" event={"ID":"8d42b02b-491a-4731-93ff-67f8f97eed24","Type":"ContainerStarted","Data":"4b524f2c7ec194da30f3d8c647bb2ef1354b46b9c5bceb90ca3d5cee9c3785bf"} Jun 06 09:21:56 crc kubenswrapper[4911]: I0606 09:21:56.203485 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" event={"ID":"8d42b02b-491a-4731-93ff-67f8f97eed24","Type":"ContainerStarted","Data":"64c054ef2b59c9f366bc1a8b7db67884821d23de7b8ea43f2fe5dc92c9dec88e"} Jun 06 09:21:56 crc kubenswrapper[4911]: I0606 09:21:56.203495 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" event={"ID":"8d42b02b-491a-4731-93ff-67f8f97eed24","Type":"ContainerStarted","Data":"e2a43f23dec4176e63725e6cfec58fbd20020d849a1a89a3201c9263d07804f0"} Jun 06 09:21:59 crc kubenswrapper[4911]: I0606 09:21:59.230043 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" event={"ID":"8d42b02b-491a-4731-93ff-67f8f97eed24","Type":"ContainerStarted","Data":"ee87936c43f72913c9e1afbe045e56ae4535cd090142f77b3fd55aed9934102f"} Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.246963 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" event={"ID":"8d42b02b-491a-4731-93ff-67f8f97eed24","Type":"ContainerStarted","Data":"753cb252ce03087fb90f69504f3c619638e729049304107709157b7780f251dd"} Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.247675 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.247697 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.247708 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.284939 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.285428 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.312818 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" podStartSLOduration=8.312797617 podStartE2EDuration="8.312797617s" podCreationTimestamp="2025-06-06 09:21:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:22:01.279130919 +0000 UTC m=+532.554556462" watchObservedRunningTime="2025-06-06 09:22:01.312797617 +0000 UTC m=+532.588223150" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.380814 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["default/crc-debug-6dx8j"] Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.381817 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="default/crc-debug-6dx8j" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.383862 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"default"/"kube-root-ca.crt" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.384179 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"default"/"openshift-service-ca.crt" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.384199 4911 reflector.go:368] Caches populated for *v1.Secret from object-"default"/"default-dockercfg-xqf5w" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.428350 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg2sl\" (UniqueName: \"kubernetes.io/projected/594b89fa-84c5-419b-99ae-963b83e43e45-kube-api-access-vg2sl\") pod \"crc-debug-6dx8j\" (UID: \"594b89fa-84c5-419b-99ae-963b83e43e45\") " pod="default/crc-debug-6dx8j" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.428433 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/594b89fa-84c5-419b-99ae-963b83e43e45-host\") pod \"crc-debug-6dx8j\" (UID: \"594b89fa-84c5-419b-99ae-963b83e43e45\") " pod="default/crc-debug-6dx8j" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.529853 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/594b89fa-84c5-419b-99ae-963b83e43e45-host\") pod \"crc-debug-6dx8j\" (UID: \"594b89fa-84c5-419b-99ae-963b83e43e45\") " pod="default/crc-debug-6dx8j" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.529957 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg2sl\" (UniqueName: \"kubernetes.io/projected/594b89fa-84c5-419b-99ae-963b83e43e45-kube-api-access-vg2sl\") pod \"crc-debug-6dx8j\" (UID: \"594b89fa-84c5-419b-99ae-963b83e43e45\") " pod="default/crc-debug-6dx8j" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.530009 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/594b89fa-84c5-419b-99ae-963b83e43e45-host\") pod \"crc-debug-6dx8j\" (UID: \"594b89fa-84c5-419b-99ae-963b83e43e45\") " pod="default/crc-debug-6dx8j" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.546670 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg2sl\" (UniqueName: \"kubernetes.io/projected/594b89fa-84c5-419b-99ae-963b83e43e45-kube-api-access-vg2sl\") pod \"crc-debug-6dx8j\" (UID: \"594b89fa-84c5-419b-99ae-963b83e43e45\") " pod="default/crc-debug-6dx8j" Jun 06 09:22:01 crc kubenswrapper[4911]: I0606 09:22:01.700890 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="default/crc-debug-6dx8j" Jun 06 09:22:02 crc kubenswrapper[4911]: I0606 09:22:02.253279 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="default/crc-debug-6dx8j" event={"ID":"594b89fa-84c5-419b-99ae-963b83e43e45","Type":"ContainerStarted","Data":"4cb29fbcecf01577cb270c18d31c3e838c59de5ce22110ee3f6ed350f8d80f55"} Jun 06 09:22:09 crc kubenswrapper[4911]: I0606 09:22:09.294042 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="default/crc-debug-6dx8j" event={"ID":"594b89fa-84c5-419b-99ae-963b83e43e45","Type":"ContainerStarted","Data":"3aa905ce217b286ab5180b74839ea30e62970cb64fa7bbbc0f69dffd85c51aaf"} Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.142379 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="default/crc-debug-6dx8j" podStartSLOduration=7.236774346 podStartE2EDuration="14.142352589s" podCreationTimestamp="2025-06-06 09:22:01 +0000 UTC" firstStartedPulling="2025-06-06 09:22:01.725733539 +0000 UTC m=+533.001159092" lastFinishedPulling="2025-06-06 09:22:08.631311792 +0000 UTC m=+539.906737335" observedRunningTime="2025-06-06 09:22:09.309607368 +0000 UTC m=+540.585032921" watchObservedRunningTime="2025-06-06 09:22:15.142352589 +0000 UTC m=+546.417778122" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.144081 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph"] Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.145140 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.148064 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.148455 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.152251 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-75cjt" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.240056 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/fcb703c1-a42a-4e53-8bcb-6279a76856c1-log\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.240177 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf9sn\" (UniqueName: \"kubernetes.io/projected/fcb703c1-a42a-4e53-8bcb-6279a76856c1-kube-api-access-lf9sn\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.240204 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/fcb703c1-a42a-4e53-8bcb-6279a76856c1-run\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.240265 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/fcb703c1-a42a-4e53-8bcb-6279a76856c1-data\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.341330 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/fcb703c1-a42a-4e53-8bcb-6279a76856c1-data\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.341419 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/fcb703c1-a42a-4e53-8bcb-6279a76856c1-log\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.341477 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf9sn\" (UniqueName: \"kubernetes.io/projected/fcb703c1-a42a-4e53-8bcb-6279a76856c1-kube-api-access-lf9sn\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.341504 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/fcb703c1-a42a-4e53-8bcb-6279a76856c1-run\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.342077 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/fcb703c1-a42a-4e53-8bcb-6279a76856c1-run\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.342320 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/fcb703c1-a42a-4e53-8bcb-6279a76856c1-log\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.342703 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/fcb703c1-a42a-4e53-8bcb-6279a76856c1-data\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.375129 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf9sn\" (UniqueName: \"kubernetes.io/projected/fcb703c1-a42a-4e53-8bcb-6279a76856c1-kube-api-access-lf9sn\") pod \"ceph\" (UID: \"fcb703c1-a42a-4e53-8bcb-6279a76856c1\") " pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: I0606 09:22:15.462725 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph" Jun 06 09:22:15 crc kubenswrapper[4911]: W0606 09:22:15.490804 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfcb703c1_a42a_4e53_8bcb_6279a76856c1.slice/crio-678ff195d1cdd16e33386a0847c140f5b1df04cd447a805a6dca96fea41214c9 WatchSource:0}: Error finding container 678ff195d1cdd16e33386a0847c140f5b1df04cd447a805a6dca96fea41214c9: Status 404 returned error can't find the container with id 678ff195d1cdd16e33386a0847c140f5b1df04cd447a805a6dca96fea41214c9 Jun 06 09:22:16 crc kubenswrapper[4911]: I0606 09:22:16.333977 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph" event={"ID":"fcb703c1-a42a-4e53-8bcb-6279a76856c1","Type":"ContainerStarted","Data":"678ff195d1cdd16e33386a0847c140f5b1df04cd447a805a6dca96fea41214c9"} Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.317243 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["default/crc-debug-6dx8j"] Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.317868 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="default/crc-debug-6dx8j" podUID="594b89fa-84c5-419b-99ae-963b83e43e45" containerName="container-00" containerID="cri-o://3aa905ce217b286ab5180b74839ea30e62970cb64fa7bbbc0f69dffd85c51aaf" gracePeriod=2 Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.320081 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["default/crc-debug-6dx8j"] Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.350473 4911 generic.go:334] "Generic (PLEG): container finished" podID="594b89fa-84c5-419b-99ae-963b83e43e45" containerID="3aa905ce217b286ab5180b74839ea30e62970cb64fa7bbbc0f69dffd85c51aaf" exitCode=0 Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.350524 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cb29fbcecf01577cb270c18d31c3e838c59de5ce22110ee3f6ed350f8d80f55" Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.396072 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="default/crc-debug-6dx8j" Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.494406 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/594b89fa-84c5-419b-99ae-963b83e43e45-host\") pod \"594b89fa-84c5-419b-99ae-963b83e43e45\" (UID: \"594b89fa-84c5-419b-99ae-963b83e43e45\") " Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.494462 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vg2sl\" (UniqueName: \"kubernetes.io/projected/594b89fa-84c5-419b-99ae-963b83e43e45-kube-api-access-vg2sl\") pod \"594b89fa-84c5-419b-99ae-963b83e43e45\" (UID: \"594b89fa-84c5-419b-99ae-963b83e43e45\") " Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.495165 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/594b89fa-84c5-419b-99ae-963b83e43e45-host" (OuterVolumeSpecName: "host") pod "594b89fa-84c5-419b-99ae-963b83e43e45" (UID: "594b89fa-84c5-419b-99ae-963b83e43e45"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.499569 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/594b89fa-84c5-419b-99ae-963b83e43e45-kube-api-access-vg2sl" (OuterVolumeSpecName: "kube-api-access-vg2sl") pod "594b89fa-84c5-419b-99ae-963b83e43e45" (UID: "594b89fa-84c5-419b-99ae-963b83e43e45"). InnerVolumeSpecName "kube-api-access-vg2sl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.595722 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/594b89fa-84c5-419b-99ae-963b83e43e45-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.595763 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vg2sl\" (UniqueName: \"kubernetes.io/projected/594b89fa-84c5-419b-99ae-963b83e43e45-kube-api-access-vg2sl\") on node \"crc\" DevicePath \"\"" Jun 06 09:22:19 crc kubenswrapper[4911]: I0606 09:22:19.956010 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="594b89fa-84c5-419b-99ae-963b83e43e45" path="/var/lib/kubelet/pods/594b89fa-84c5-419b-99ae-963b83e43e45/volumes" Jun 06 09:22:20 crc kubenswrapper[4911]: I0606 09:22:20.360323 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="default/crc-debug-6dx8j" Jun 06 09:22:24 crc kubenswrapper[4911]: I0606 09:22:24.050991 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ckqpk" Jun 06 09:22:24 crc kubenswrapper[4911]: I0606 09:22:24.300875 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:22:24 crc kubenswrapper[4911]: I0606 09:22:24.300936 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:22:31 crc kubenswrapper[4911]: I0606 09:22:31.418314 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph" event={"ID":"fcb703c1-a42a-4e53-8bcb-6279a76856c1","Type":"ContainerStarted","Data":"5d6ff51c81ced9491833f5194506de04f1e6ecf9848dca0d8808353c2f2837f7"} Jun 06 09:22:31 crc kubenswrapper[4911]: I0606 09:22:31.438235 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph" podStartSLOduration=1.238925215 podStartE2EDuration="16.438213229s" podCreationTimestamp="2025-06-06 09:22:15 +0000 UTC" firstStartedPulling="2025-06-06 09:22:15.493569309 +0000 UTC m=+546.768994862" lastFinishedPulling="2025-06-06 09:22:30.692857333 +0000 UTC m=+561.968282876" observedRunningTime="2025-06-06 09:22:31.434278507 +0000 UTC m=+562.709704050" watchObservedRunningTime="2025-06-06 09:22:31.438213229 +0000 UTC m=+562.713638772" Jun 06 09:22:54 crc kubenswrapper[4911]: I0606 09:22:54.300495 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:22:54 crc kubenswrapper[4911]: I0606 09:22:54.301718 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:22:54 crc kubenswrapper[4911]: I0606 09:22:54.301835 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:22:54 crc kubenswrapper[4911]: I0606 09:22:54.303136 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"16351863e8c8c00f3cd092f5ca35626763ec65854e86cd76f5f564cdc9f7a3ca"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 09:22:54 crc kubenswrapper[4911]: I0606 09:22:54.303239 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://16351863e8c8c00f3cd092f5ca35626763ec65854e86cd76f5f564cdc9f7a3ca" gracePeriod=600 Jun 06 09:22:54 crc kubenswrapper[4911]: I0606 09:22:54.526213 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="16351863e8c8c00f3cd092f5ca35626763ec65854e86cd76f5f564cdc9f7a3ca" exitCode=0 Jun 06 09:22:54 crc kubenswrapper[4911]: I0606 09:22:54.526374 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"16351863e8c8c00f3cd092f5ca35626763ec65854e86cd76f5f564cdc9f7a3ca"} Jun 06 09:22:54 crc kubenswrapper[4911]: I0606 09:22:54.526544 4911 scope.go:117] "RemoveContainer" containerID="24ec77e025b540903b1d2b7a8e32c883a2c9be3072b2687f98eefbbb340ac16a" Jun 06 09:22:55 crc kubenswrapper[4911]: I0606 09:22:55.533762 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"00a6b7c957b04d6217e597e04550f48b111aa26d7d1dc819e0c3ab94dfcdb9d6"} Jun 06 09:23:01 crc kubenswrapper[4911]: I0606 09:23:01.664272 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-t9zdw"] Jun 06 09:23:01 crc kubenswrapper[4911]: E0606 09:23:01.664977 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="594b89fa-84c5-419b-99ae-963b83e43e45" containerName="container-00" Jun 06 09:23:01 crc kubenswrapper[4911]: I0606 09:23:01.664988 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="594b89fa-84c5-419b-99ae-963b83e43e45" containerName="container-00" Jun 06 09:23:01 crc kubenswrapper[4911]: I0606 09:23:01.665107 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="594b89fa-84c5-419b-99ae-963b83e43e45" containerName="container-00" Jun 06 09:23:01 crc kubenswrapper[4911]: I0606 09:23:01.665511 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-t9zdw" Jun 06 09:23:01 crc kubenswrapper[4911]: I0606 09:23:01.766726 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hfcz\" (UniqueName: \"kubernetes.io/projected/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-kube-api-access-5hfcz\") pod \"crc-debug-t9zdw\" (UID: \"e1aaa6f6-d93b-41e4-af48-fb3eece73a31\") " pod="openstack/crc-debug-t9zdw" Jun 06 09:23:01 crc kubenswrapper[4911]: I0606 09:23:01.766913 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-host\") pod \"crc-debug-t9zdw\" (UID: \"e1aaa6f6-d93b-41e4-af48-fb3eece73a31\") " pod="openstack/crc-debug-t9zdw" Jun 06 09:23:01 crc kubenswrapper[4911]: I0606 09:23:01.868854 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hfcz\" (UniqueName: \"kubernetes.io/projected/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-kube-api-access-5hfcz\") pod \"crc-debug-t9zdw\" (UID: \"e1aaa6f6-d93b-41e4-af48-fb3eece73a31\") " pod="openstack/crc-debug-t9zdw" Jun 06 09:23:01 crc kubenswrapper[4911]: I0606 09:23:01.869011 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-host\") pod \"crc-debug-t9zdw\" (UID: \"e1aaa6f6-d93b-41e4-af48-fb3eece73a31\") " pod="openstack/crc-debug-t9zdw" Jun 06 09:23:01 crc kubenswrapper[4911]: I0606 09:23:01.869169 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-host\") pod \"crc-debug-t9zdw\" (UID: \"e1aaa6f6-d93b-41e4-af48-fb3eece73a31\") " pod="openstack/crc-debug-t9zdw" Jun 06 09:23:01 crc kubenswrapper[4911]: I0606 09:23:01.893034 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hfcz\" (UniqueName: \"kubernetes.io/projected/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-kube-api-access-5hfcz\") pod \"crc-debug-t9zdw\" (UID: \"e1aaa6f6-d93b-41e4-af48-fb3eece73a31\") " pod="openstack/crc-debug-t9zdw" Jun 06 09:23:01 crc kubenswrapper[4911]: I0606 09:23:01.988359 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-t9zdw" Jun 06 09:23:02 crc kubenswrapper[4911]: I0606 09:23:02.590324 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-t9zdw" event={"ID":"e1aaa6f6-d93b-41e4-af48-fb3eece73a31","Type":"ContainerStarted","Data":"a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d"} Jun 06 09:23:02 crc kubenswrapper[4911]: I0606 09:23:02.590910 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-t9zdw" event={"ID":"e1aaa6f6-d93b-41e4-af48-fb3eece73a31","Type":"ContainerStarted","Data":"30bfef211f217268511cb196793bb6df58ca2912b732290eb359e07ac169c3e2"} Jun 06 09:23:02 crc kubenswrapper[4911]: I0606 09:23:02.607902 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-t9zdw" podStartSLOduration=1.607881913 podStartE2EDuration="1.607881913s" podCreationTimestamp="2025-06-06 09:23:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:23:02.606510248 +0000 UTC m=+593.881935791" watchObservedRunningTime="2025-06-06 09:23:02.607881913 +0000 UTC m=+593.883307476" Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.440529 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-t9zdw"] Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.441438 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-t9zdw" podUID="e1aaa6f6-d93b-41e4-af48-fb3eece73a31" containerName="container-00" containerID="cri-o://a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d" gracePeriod=2 Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.444837 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-t9zdw"] Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.508071 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-t9zdw" Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.603750 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hfcz\" (UniqueName: \"kubernetes.io/projected/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-kube-api-access-5hfcz\") pod \"e1aaa6f6-d93b-41e4-af48-fb3eece73a31\" (UID: \"e1aaa6f6-d93b-41e4-af48-fb3eece73a31\") " Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.603807 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-host\") pod \"e1aaa6f6-d93b-41e4-af48-fb3eece73a31\" (UID: \"e1aaa6f6-d93b-41e4-af48-fb3eece73a31\") " Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.603938 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-host" (OuterVolumeSpecName: "host") pod "e1aaa6f6-d93b-41e4-af48-fb3eece73a31" (UID: "e1aaa6f6-d93b-41e4-af48-fb3eece73a31"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.604180 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.611927 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-kube-api-access-5hfcz" (OuterVolumeSpecName: "kube-api-access-5hfcz") pod "e1aaa6f6-d93b-41e4-af48-fb3eece73a31" (UID: "e1aaa6f6-d93b-41e4-af48-fb3eece73a31"). InnerVolumeSpecName "kube-api-access-5hfcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.643404 4911 generic.go:334] "Generic (PLEG): container finished" podID="e1aaa6f6-d93b-41e4-af48-fb3eece73a31" containerID="a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d" exitCode=0 Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.643476 4911 scope.go:117] "RemoveContainer" containerID="a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d" Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.643545 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-t9zdw" Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.667445 4911 scope.go:117] "RemoveContainer" containerID="a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d" Jun 06 09:23:12 crc kubenswrapper[4911]: E0606 09:23:12.668082 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d\": container with ID starting with a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d not found: ID does not exist" containerID="a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d" Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.668165 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d"} err="failed to get container status \"a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d\": rpc error: code = NotFound desc = could not find container \"a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d\": container with ID starting with a05c1e5379f5b987659cd6ea94b853a36b46c307529a9920ce91f016c89cc54d not found: ID does not exist" Jun 06 09:23:12 crc kubenswrapper[4911]: I0606 09:23:12.706218 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hfcz\" (UniqueName: \"kubernetes.io/projected/e1aaa6f6-d93b-41e4-af48-fb3eece73a31-kube-api-access-5hfcz\") on node \"crc\" DevicePath \"\"" Jun 06 09:23:13 crc kubenswrapper[4911]: I0606 09:23:13.959410 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1aaa6f6-d93b-41e4-af48-fb3eece73a31" path="/var/lib/kubelet/pods/e1aaa6f6-d93b-41e4-af48-fb3eece73a31/volumes" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.276267 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p5p9k"] Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.277494 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" 
podUID="99e87a63-ddac-4e72-9f32-aff82d073d08" containerName="controller-manager" containerID="cri-o://bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc" gracePeriod=30 Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.375793 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb"] Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.376153 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" podUID="9999ca00-de82-4451-bfe9-c216be6edd43" containerName="route-controller-manager" containerID="cri-o://3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6" gracePeriod=30 Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.723535 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.776951 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.830679 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-client-ca\") pod \"99e87a63-ddac-4e72-9f32-aff82d073d08\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.830744 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22tqd\" (UniqueName: \"kubernetes.io/projected/99e87a63-ddac-4e72-9f32-aff82d073d08-kube-api-access-22tqd\") pod \"99e87a63-ddac-4e72-9f32-aff82d073d08\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.830914 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-config\") pod \"9999ca00-de82-4451-bfe9-c216be6edd43\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.830966 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-proxy-ca-bundles\") pod \"99e87a63-ddac-4e72-9f32-aff82d073d08\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.830988 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99e87a63-ddac-4e72-9f32-aff82d073d08-serving-cert\") pod \"99e87a63-ddac-4e72-9f32-aff82d073d08\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.831013 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-config\") pod \"99e87a63-ddac-4e72-9f32-aff82d073d08\" (UID: \"99e87a63-ddac-4e72-9f32-aff82d073d08\") " Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.831035 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-client-ca\") pod \"9999ca00-de82-4451-bfe9-c216be6edd43\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.831053 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6nkq\" (UniqueName: \"kubernetes.io/projected/9999ca00-de82-4451-bfe9-c216be6edd43-kube-api-access-l6nkq\") pod \"9999ca00-de82-4451-bfe9-c216be6edd43\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.831079 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9999ca00-de82-4451-bfe9-c216be6edd43-serving-cert\") pod \"9999ca00-de82-4451-bfe9-c216be6edd43\" (UID: \"9999ca00-de82-4451-bfe9-c216be6edd43\") " Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.831971 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-client-ca" (OuterVolumeSpecName: "client-ca") pod "9999ca00-de82-4451-bfe9-c216be6edd43" (UID: "9999ca00-de82-4451-bfe9-c216be6edd43"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.832010 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-config" (OuterVolumeSpecName: "config") pod "9999ca00-de82-4451-bfe9-c216be6edd43" (UID: "9999ca00-de82-4451-bfe9-c216be6edd43"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.832004 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-client-ca" (OuterVolumeSpecName: "client-ca") pod "99e87a63-ddac-4e72-9f32-aff82d073d08" (UID: "99e87a63-ddac-4e72-9f32-aff82d073d08"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.832037 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "99e87a63-ddac-4e72-9f32-aff82d073d08" (UID: "99e87a63-ddac-4e72-9f32-aff82d073d08"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.832084 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-config" (OuterVolumeSpecName: "config") pod "99e87a63-ddac-4e72-9f32-aff82d073d08" (UID: "99e87a63-ddac-4e72-9f32-aff82d073d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.837402 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9999ca00-de82-4451-bfe9-c216be6edd43-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9999ca00-de82-4451-bfe9-c216be6edd43" (UID: "9999ca00-de82-4451-bfe9-c216be6edd43"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.837462 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99e87a63-ddac-4e72-9f32-aff82d073d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "99e87a63-ddac-4e72-9f32-aff82d073d08" (UID: "99e87a63-ddac-4e72-9f32-aff82d073d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.838333 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9999ca00-de82-4451-bfe9-c216be6edd43-kube-api-access-l6nkq" (OuterVolumeSpecName: "kube-api-access-l6nkq") pod "9999ca00-de82-4451-bfe9-c216be6edd43" (UID: "9999ca00-de82-4451-bfe9-c216be6edd43"). InnerVolumeSpecName "kube-api-access-l6nkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.841671 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99e87a63-ddac-4e72-9f32-aff82d073d08-kube-api-access-22tqd" (OuterVolumeSpecName: "kube-api-access-22tqd") pod "99e87a63-ddac-4e72-9f32-aff82d073d08" (UID: "99e87a63-ddac-4e72-9f32-aff82d073d08"). InnerVolumeSpecName "kube-api-access-22tqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.882833 4911 generic.go:334] "Generic (PLEG): container finished" podID="99e87a63-ddac-4e72-9f32-aff82d073d08" containerID="bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc" exitCode=0 Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.882896 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" event={"ID":"99e87a63-ddac-4e72-9f32-aff82d073d08","Type":"ContainerDied","Data":"bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc"} Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.882922 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" event={"ID":"99e87a63-ddac-4e72-9f32-aff82d073d08","Type":"ContainerDied","Data":"4d7fbef068f0156b6818e0092184cefd8ba13748e77c3eb184a8e5820c38fc35"} Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.882938 4911 scope.go:117] "RemoveContainer" containerID="bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.883033 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p5p9k" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.887952 4911 generic.go:334] "Generic (PLEG): container finished" podID="9999ca00-de82-4451-bfe9-c216be6edd43" containerID="3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6" exitCode=0 Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.887996 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" event={"ID":"9999ca00-de82-4451-bfe9-c216be6edd43","Type":"ContainerDied","Data":"3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6"} Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.888022 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" event={"ID":"9999ca00-de82-4451-bfe9-c216be6edd43","Type":"ContainerDied","Data":"7b9c8cf9dfe9ea2094b74f461fa392da2cd553b950ec9c809ff56f49cda9bfa8"} Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.888073 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.899849 4911 scope.go:117] "RemoveContainer" containerID="bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc" Jun 06 09:23:54 crc kubenswrapper[4911]: E0606 09:23:54.900500 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc\": container with ID starting with bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc not found: ID does not exist" containerID="bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.900533 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc"} err="failed to get container status \"bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc\": rpc error: code = NotFound desc = could not find container \"bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc\": container with ID starting with bb6350a6930e75dd0f31f62aca639b506748719eca35df4e1f9b45bea660cefc not found: ID does not exist" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.900554 4911 scope.go:117] "RemoveContainer" containerID="3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.917212 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p5p9k"] Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.918783 4911 scope.go:117] "RemoveContainer" containerID="3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6" Jun 06 09:23:54 crc kubenswrapper[4911]: E0606 09:23:54.920201 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6\": container with ID starting with 3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6 not found: ID does not exist" containerID="3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6" Jun 06 09:23:54 crc 
kubenswrapper[4911]: I0606 09:23:54.920262 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6"} err="failed to get container status \"3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6\": rpc error: code = NotFound desc = could not find container \"3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6\": container with ID starting with 3ac0a8a67ba32daf09d262062282434bd891456203c1f4c00063865a1e4c4ba6 not found: ID does not exist" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.920757 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p5p9k"] Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.929239 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb"] Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.931870 4911 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-client-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.931908 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22tqd\" (UniqueName: \"kubernetes.io/projected/99e87a63-ddac-4e72-9f32-aff82d073d08-kube-api-access-22tqd\") on node \"crc\" DevicePath \"\"" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.931924 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.931937 4911 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.931947 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99e87a63-ddac-4e72-9f32-aff82d073d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.931959 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99e87a63-ddac-4e72-9f32-aff82d073d08-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.931969 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6nkq\" (UniqueName: \"kubernetes.io/projected/9999ca00-de82-4451-bfe9-c216be6edd43-kube-api-access-l6nkq\") on node \"crc\" DevicePath \"\"" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.931979 4911 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9999ca00-de82-4451-bfe9-c216be6edd43-client-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.931989 4911 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9999ca00-de82-4451-bfe9-c216be6edd43-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:23:54 crc kubenswrapper[4911]: I0606 09:23:54.933168 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bsqpb"] Jun 06 
09:23:55 crc kubenswrapper[4911]: I0606 09:23:55.954483 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9999ca00-de82-4451-bfe9-c216be6edd43" path="/var/lib/kubelet/pods/9999ca00-de82-4451-bfe9-c216be6edd43/volumes" Jun 06 09:23:55 crc kubenswrapper[4911]: I0606 09:23:55.955477 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99e87a63-ddac-4e72-9f32-aff82d073d08" path="/var/lib/kubelet/pods/99e87a63-ddac-4e72-9f32-aff82d073d08/volumes" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.640425 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29"] Jun 06 09:23:56 crc kubenswrapper[4911]: E0606 09:23:56.640702 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1aaa6f6-d93b-41e4-af48-fb3eece73a31" containerName="container-00" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.640717 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1aaa6f6-d93b-41e4-af48-fb3eece73a31" containerName="container-00" Jun 06 09:23:56 crc kubenswrapper[4911]: E0606 09:23:56.640727 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99e87a63-ddac-4e72-9f32-aff82d073d08" containerName="controller-manager" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.640734 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="99e87a63-ddac-4e72-9f32-aff82d073d08" containerName="controller-manager" Jun 06 09:23:56 crc kubenswrapper[4911]: E0606 09:23:56.640746 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9999ca00-de82-4451-bfe9-c216be6edd43" containerName="route-controller-manager" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.640753 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9999ca00-de82-4451-bfe9-c216be6edd43" containerName="route-controller-manager" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.640860 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="99e87a63-ddac-4e72-9f32-aff82d073d08" containerName="controller-manager" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.640874 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9999ca00-de82-4451-bfe9-c216be6edd43" containerName="route-controller-manager" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.640884 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1aaa6f6-d93b-41e4-af48-fb3eece73a31" containerName="container-00" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.641363 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.643211 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-664758989-tpt8k"] Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.643894 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.644141 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.645057 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.645059 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.645666 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.649364 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.649527 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.649677 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.649961 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.650173 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.650513 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.653782 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.654428 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.654613 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29"] Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.654805 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chrq8\" (UniqueName: \"kubernetes.io/projected/0d8af6be-c7a8-44d3-a73b-b49d099127b5-kube-api-access-chrq8\") pod \"route-controller-manager-dd68f8c65-xlp29\" (UID: \"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.654857 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d8af6be-c7a8-44d3-a73b-b49d099127b5-config\") pod \"route-controller-manager-dd68f8c65-xlp29\" (UID: \"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.654874 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d8af6be-c7a8-44d3-a73b-b49d099127b5-serving-cert\") pod 
\"route-controller-manager-dd68f8c65-xlp29\" (UID: \"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.654904 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4snc\" (UniqueName: \"kubernetes.io/projected/00307389-3fe1-4759-8d9e-dcd202be4210-kube-api-access-r4snc\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.654930 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00307389-3fe1-4759-8d9e-dcd202be4210-serving-cert\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.654957 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0d8af6be-c7a8-44d3-a73b-b49d099127b5-client-ca\") pod \"route-controller-manager-dd68f8c65-xlp29\" (UID: \"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.654988 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00307389-3fe1-4759-8d9e-dcd202be4210-config\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.655002 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/00307389-3fe1-4759-8d9e-dcd202be4210-proxy-ca-bundles\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.655021 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/00307389-3fe1-4759-8d9e-dcd202be4210-client-ca\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.657169 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-664758989-tpt8k"] Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.657457 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.756399 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0d8af6be-c7a8-44d3-a73b-b49d099127b5-client-ca\") pod \"route-controller-manager-dd68f8c65-xlp29\" (UID: \"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " 
pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.756601 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00307389-3fe1-4759-8d9e-dcd202be4210-config\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.756653 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/00307389-3fe1-4759-8d9e-dcd202be4210-proxy-ca-bundles\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.756709 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/00307389-3fe1-4759-8d9e-dcd202be4210-client-ca\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.756783 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chrq8\" (UniqueName: \"kubernetes.io/projected/0d8af6be-c7a8-44d3-a73b-b49d099127b5-kube-api-access-chrq8\") pod \"route-controller-manager-dd68f8c65-xlp29\" (UID: \"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.756836 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d8af6be-c7a8-44d3-a73b-b49d099127b5-serving-cert\") pod \"route-controller-manager-dd68f8c65-xlp29\" (UID: \"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.756868 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d8af6be-c7a8-44d3-a73b-b49d099127b5-config\") pod \"route-controller-manager-dd68f8c65-xlp29\" (UID: \"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.756936 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4snc\" (UniqueName: \"kubernetes.io/projected/00307389-3fe1-4759-8d9e-dcd202be4210-kube-api-access-r4snc\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.756983 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00307389-3fe1-4759-8d9e-dcd202be4210-serving-cert\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.758482 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0d8af6be-c7a8-44d3-a73b-b49d099127b5-client-ca\") pod \"route-controller-manager-dd68f8c65-xlp29\" (UID: \"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.758813 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/00307389-3fe1-4759-8d9e-dcd202be4210-proxy-ca-bundles\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.759132 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d8af6be-c7a8-44d3-a73b-b49d099127b5-config\") pod \"route-controller-manager-dd68f8c65-xlp29\" (UID: \"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.759506 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/00307389-3fe1-4759-8d9e-dcd202be4210-client-ca\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.759863 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00307389-3fe1-4759-8d9e-dcd202be4210-config\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.766881 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d8af6be-c7a8-44d3-a73b-b49d099127b5-serving-cert\") pod \"route-controller-manager-dd68f8c65-xlp29\" (UID: \"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.775755 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00307389-3fe1-4759-8d9e-dcd202be4210-serving-cert\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.778956 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4snc\" (UniqueName: \"kubernetes.io/projected/00307389-3fe1-4759-8d9e-dcd202be4210-kube-api-access-r4snc\") pod \"controller-manager-664758989-tpt8k\" (UID: \"00307389-3fe1-4759-8d9e-dcd202be4210\") " pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.784432 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chrq8\" (UniqueName: \"kubernetes.io/projected/0d8af6be-c7a8-44d3-a73b-b49d099127b5-kube-api-access-chrq8\") pod \"route-controller-manager-dd68f8c65-xlp29\" (UID: 
\"0d8af6be-c7a8-44d3-a73b-b49d099127b5\") " pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.962881 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:56 crc kubenswrapper[4911]: I0606 09:23:56.977005 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:57 crc kubenswrapper[4911]: I0606 09:23:57.227503 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29"] Jun 06 09:23:57 crc kubenswrapper[4911]: I0606 09:23:57.387968 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-664758989-tpt8k"] Jun 06 09:23:57 crc kubenswrapper[4911]: I0606 09:23:57.906508 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-664758989-tpt8k" event={"ID":"00307389-3fe1-4759-8d9e-dcd202be4210","Type":"ContainerStarted","Data":"92ec57e1710de1e92f51c16d448b8da6aa46b1096062a24642ab591b3d7a2850"} Jun 06 09:23:57 crc kubenswrapper[4911]: I0606 09:23:57.906819 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-664758989-tpt8k" event={"ID":"00307389-3fe1-4759-8d9e-dcd202be4210","Type":"ContainerStarted","Data":"1a939b1733eb0412b78210c72797d74191b74e6e2a6b9890275d04c6df9e0281"} Jun 06 09:23:57 crc kubenswrapper[4911]: I0606 09:23:57.906838 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:57 crc kubenswrapper[4911]: I0606 09:23:57.908137 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" event={"ID":"0d8af6be-c7a8-44d3-a73b-b49d099127b5","Type":"ContainerStarted","Data":"3c5011bc6abb5ada4035441da0b66120cf0a68cf2dac24c1ae6522c1c2552634"} Jun 06 09:23:57 crc kubenswrapper[4911]: I0606 09:23:57.908178 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" event={"ID":"0d8af6be-c7a8-44d3-a73b-b49d099127b5","Type":"ContainerStarted","Data":"c90208debfb7a3f67b4f76b83b94d1e8606ca0b358a4dbc250ac8f41c99aa119"} Jun 06 09:23:57 crc kubenswrapper[4911]: I0606 09:23:57.908377 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:57 crc kubenswrapper[4911]: I0606 09:23:57.911556 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-664758989-tpt8k" Jun 06 09:23:57 crc kubenswrapper[4911]: I0606 09:23:57.946193 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-664758989-tpt8k" podStartSLOduration=3.946169464 podStartE2EDuration="3.946169464s" podCreationTimestamp="2025-06-06 09:23:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:23:57.926855545 +0000 UTC m=+649.202281098" watchObservedRunningTime="2025-06-06 09:23:57.946169464 +0000 UTC m=+649.221595017" Jun 06 
09:23:58 crc kubenswrapper[4911]: I0606 09:23:58.021526 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" Jun 06 09:23:58 crc kubenswrapper[4911]: I0606 09:23:58.038501 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-dd68f8c65-xlp29" podStartSLOduration=4.038479495 podStartE2EDuration="4.038479495s" podCreationTimestamp="2025-06-06 09:23:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:23:57.976570728 +0000 UTC m=+649.251996271" watchObservedRunningTime="2025-06-06 09:23:58.038479495 +0000 UTC m=+649.313905038" Jun 06 09:24:01 crc kubenswrapper[4911]: I0606 09:24:01.870665 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-gcsg4"] Jun 06 09:24:01 crc kubenswrapper[4911]: I0606 09:24:01.871630 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-gcsg4" Jun 06 09:24:01 crc kubenswrapper[4911]: I0606 09:24:01.927773 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5p27\" (UniqueName: \"kubernetes.io/projected/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-kube-api-access-v5p27\") pod \"crc-debug-gcsg4\" (UID: \"80c4a6cc-d9b2-4398-96d8-e790a7205dcc\") " pod="openstack/crc-debug-gcsg4" Jun 06 09:24:01 crc kubenswrapper[4911]: I0606 09:24:01.927890 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-host\") pod \"crc-debug-gcsg4\" (UID: \"80c4a6cc-d9b2-4398-96d8-e790a7205dcc\") " pod="openstack/crc-debug-gcsg4" Jun 06 09:24:02 crc kubenswrapper[4911]: I0606 09:24:02.029069 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5p27\" (UniqueName: \"kubernetes.io/projected/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-kube-api-access-v5p27\") pod \"crc-debug-gcsg4\" (UID: \"80c4a6cc-d9b2-4398-96d8-e790a7205dcc\") " pod="openstack/crc-debug-gcsg4" Jun 06 09:24:02 crc kubenswrapper[4911]: I0606 09:24:02.029177 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-host\") pod \"crc-debug-gcsg4\" (UID: \"80c4a6cc-d9b2-4398-96d8-e790a7205dcc\") " pod="openstack/crc-debug-gcsg4" Jun 06 09:24:02 crc kubenswrapper[4911]: I0606 09:24:02.030146 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-host\") pod \"crc-debug-gcsg4\" (UID: \"80c4a6cc-d9b2-4398-96d8-e790a7205dcc\") " pod="openstack/crc-debug-gcsg4" Jun 06 09:24:02 crc kubenswrapper[4911]: I0606 09:24:02.058286 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5p27\" (UniqueName: \"kubernetes.io/projected/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-kube-api-access-v5p27\") pod \"crc-debug-gcsg4\" (UID: \"80c4a6cc-d9b2-4398-96d8-e790a7205dcc\") " pod="openstack/crc-debug-gcsg4" Jun 06 09:24:02 crc kubenswrapper[4911]: I0606 09:24:02.193580 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-gcsg4" Jun 06 09:24:02 crc kubenswrapper[4911]: I0606 09:24:02.941248 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-gcsg4" event={"ID":"80c4a6cc-d9b2-4398-96d8-e790a7205dcc","Type":"ContainerStarted","Data":"78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9"} Jun 06 09:24:02 crc kubenswrapper[4911]: I0606 09:24:02.941579 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-gcsg4" event={"ID":"80c4a6cc-d9b2-4398-96d8-e790a7205dcc","Type":"ContainerStarted","Data":"39c0bb549d51b1fe5147b0658c1740a9b9172e205379cb09f86a3a13f73a9895"} Jun 06 09:24:02 crc kubenswrapper[4911]: I0606 09:24:02.958610 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-gcsg4" podStartSLOduration=1.958588292 podStartE2EDuration="1.958588292s" podCreationTimestamp="2025-06-06 09:24:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:24:02.95505513 +0000 UTC m=+654.230480673" watchObservedRunningTime="2025-06-06 09:24:02.958588292 +0000 UTC m=+654.234013835" Jun 06 09:24:02 crc kubenswrapper[4911]: I0606 09:24:02.976737 4911 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.532053 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k"] Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.533382 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.535480 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.541860 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k"] Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.658499 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-util\") pod \"6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k\" (UID: \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.658589 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpntz\" (UniqueName: \"kubernetes.io/projected/7c2e9c04-daa4-4e63-8e6f-952d8118802b-kube-api-access-kpntz\") pod \"6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k\" (UID: \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.658664 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-bundle\") pod \"6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k\" (UID: 
\"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.761772 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-bundle\") pod \"6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k\" (UID: \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.761861 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-util\") pod \"6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k\" (UID: \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.761945 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpntz\" (UniqueName: \"kubernetes.io/projected/7c2e9c04-daa4-4e63-8e6f-952d8118802b-kube-api-access-kpntz\") pod \"6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k\" (UID: \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.762500 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-bundle\") pod \"6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k\" (UID: \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.762515 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-util\") pod \"6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k\" (UID: \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.783049 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpntz\" (UniqueName: \"kubernetes.io/projected/7c2e9c04-daa4-4e63-8e6f-952d8118802b-kube-api-access-kpntz\") pod \"6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k\" (UID: \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:04 crc kubenswrapper[4911]: I0606 09:24:04.851937 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:05 crc kubenswrapper[4911]: I0606 09:24:05.273566 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k"] Jun 06 09:24:05 crc kubenswrapper[4911]: W0606 09:24:05.281854 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c2e9c04_daa4_4e63_8e6f_952d8118802b.slice/crio-8b83a8a125b5b58e5f0c4bb9a87f20491345d44f8bbcd2f5650d364f11c4700a WatchSource:0}: Error finding container 8b83a8a125b5b58e5f0c4bb9a87f20491345d44f8bbcd2f5650d364f11c4700a: Status 404 returned error can't find the container with id 8b83a8a125b5b58e5f0c4bb9a87f20491345d44f8bbcd2f5650d364f11c4700a Jun 06 09:24:05 crc kubenswrapper[4911]: I0606 09:24:05.959546 4911 generic.go:334] "Generic (PLEG): container finished" podID="7c2e9c04-daa4-4e63-8e6f-952d8118802b" containerID="75c9873f4b9e8e72fb085b95c50bd9dde9bea1ceffe8f8dde49c95a30605d5e0" exitCode=0 Jun 06 09:24:05 crc kubenswrapper[4911]: I0606 09:24:05.959673 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" event={"ID":"7c2e9c04-daa4-4e63-8e6f-952d8118802b","Type":"ContainerDied","Data":"75c9873f4b9e8e72fb085b95c50bd9dde9bea1ceffe8f8dde49c95a30605d5e0"} Jun 06 09:24:05 crc kubenswrapper[4911]: I0606 09:24:05.959718 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" event={"ID":"7c2e9c04-daa4-4e63-8e6f-952d8118802b","Type":"ContainerStarted","Data":"8b83a8a125b5b58e5f0c4bb9a87f20491345d44f8bbcd2f5650d364f11c4700a"} Jun 06 09:24:06 crc kubenswrapper[4911]: I0606 09:24:06.891445 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mm5sw"] Jun 06 09:24:06 crc kubenswrapper[4911]: I0606 09:24:06.893122 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:06 crc kubenswrapper[4911]: I0606 09:24:06.907899 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mm5sw"] Jun 06 09:24:06 crc kubenswrapper[4911]: I0606 09:24:06.994841 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grcgt\" (UniqueName: \"kubernetes.io/projected/6e20b646-7248-4090-ba01-f4db926290dc-kube-api-access-grcgt\") pod \"redhat-operators-mm5sw\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:06 crc kubenswrapper[4911]: I0606 09:24:06.994915 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-catalog-content\") pod \"redhat-operators-mm5sw\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:06 crc kubenswrapper[4911]: I0606 09:24:06.995054 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-utilities\") pod \"redhat-operators-mm5sw\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:07 crc kubenswrapper[4911]: I0606 09:24:07.096432 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grcgt\" (UniqueName: \"kubernetes.io/projected/6e20b646-7248-4090-ba01-f4db926290dc-kube-api-access-grcgt\") pod \"redhat-operators-mm5sw\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:07 crc kubenswrapper[4911]: I0606 09:24:07.096497 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-catalog-content\") pod \"redhat-operators-mm5sw\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:07 crc kubenswrapper[4911]: I0606 09:24:07.096574 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-utilities\") pod \"redhat-operators-mm5sw\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:07 crc kubenswrapper[4911]: I0606 09:24:07.096996 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-utilities\") pod \"redhat-operators-mm5sw\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:07 crc kubenswrapper[4911]: I0606 09:24:07.097136 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-catalog-content\") pod \"redhat-operators-mm5sw\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:07 crc kubenswrapper[4911]: I0606 09:24:07.117990 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-grcgt\" (UniqueName: \"kubernetes.io/projected/6e20b646-7248-4090-ba01-f4db926290dc-kube-api-access-grcgt\") pod \"redhat-operators-mm5sw\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:07 crc kubenswrapper[4911]: I0606 09:24:07.215538 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:08 crc kubenswrapper[4911]: I0606 09:24:08.201344 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mm5sw"] Jun 06 09:24:08 crc kubenswrapper[4911]: I0606 09:24:08.975867 4911 generic.go:334] "Generic (PLEG): container finished" podID="6e20b646-7248-4090-ba01-f4db926290dc" containerID="cec0909b7da69b167c2a03cd3e7351fce143cd648207508439649cc74181527b" exitCode=0 Jun 06 09:24:08 crc kubenswrapper[4911]: I0606 09:24:08.975946 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mm5sw" event={"ID":"6e20b646-7248-4090-ba01-f4db926290dc","Type":"ContainerDied","Data":"cec0909b7da69b167c2a03cd3e7351fce143cd648207508439649cc74181527b"} Jun 06 09:24:08 crc kubenswrapper[4911]: I0606 09:24:08.975982 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mm5sw" event={"ID":"6e20b646-7248-4090-ba01-f4db926290dc","Type":"ContainerStarted","Data":"dcd392c3916cc5edfd6b63829c3ae4948d9bd24fd767a606857e0fb22bb80eda"} Jun 06 09:24:08 crc kubenswrapper[4911]: I0606 09:24:08.978015 4911 generic.go:334] "Generic (PLEG): container finished" podID="7c2e9c04-daa4-4e63-8e6f-952d8118802b" containerID="5927ac1511b6abed0a9a226bb0171a55784b435044d76f38331a4f6a3fa5bc4b" exitCode=0 Jun 06 09:24:08 crc kubenswrapper[4911]: I0606 09:24:08.978047 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" event={"ID":"7c2e9c04-daa4-4e63-8e6f-952d8118802b","Type":"ContainerDied","Data":"5927ac1511b6abed0a9a226bb0171a55784b435044d76f38331a4f6a3fa5bc4b"} Jun 06 09:24:09 crc kubenswrapper[4911]: I0606 09:24:09.985861 4911 generic.go:334] "Generic (PLEG): container finished" podID="7c2e9c04-daa4-4e63-8e6f-952d8118802b" containerID="56d82652831596a68a6eb12e027545811f03d0aa2956a940ee2d26a8e21c4e11" exitCode=0 Jun 06 09:24:09 crc kubenswrapper[4911]: I0606 09:24:09.985957 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" event={"ID":"7c2e9c04-daa4-4e63-8e6f-952d8118802b","Type":"ContainerDied","Data":"56d82652831596a68a6eb12e027545811f03d0aa2956a940ee2d26a8e21c4e11"} Jun 06 09:24:10 crc kubenswrapper[4911]: I0606 09:24:10.994993 4911 generic.go:334] "Generic (PLEG): container finished" podID="6e20b646-7248-4090-ba01-f4db926290dc" containerID="651f014d2d61a40f82754c7e165971ab59b29fd55a7fb418bfcb348d3f7a6c83" exitCode=0 Jun 06 09:24:10 crc kubenswrapper[4911]: I0606 09:24:10.995198 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mm5sw" event={"ID":"6e20b646-7248-4090-ba01-f4db926290dc","Type":"ContainerDied","Data":"651f014d2d61a40f82754c7e165971ab59b29fd55a7fb418bfcb348d3f7a6c83"} Jun 06 09:24:11 crc kubenswrapper[4911]: I0606 09:24:11.351285 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:11 crc kubenswrapper[4911]: I0606 09:24:11.552110 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpntz\" (UniqueName: \"kubernetes.io/projected/7c2e9c04-daa4-4e63-8e6f-952d8118802b-kube-api-access-kpntz\") pod \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\" (UID: \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " Jun 06 09:24:11 crc kubenswrapper[4911]: I0606 09:24:11.552236 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-bundle\") pod \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\" (UID: \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " Jun 06 09:24:11 crc kubenswrapper[4911]: I0606 09:24:11.552269 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-util\") pod \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\" (UID: \"7c2e9c04-daa4-4e63-8e6f-952d8118802b\") " Jun 06 09:24:11 crc kubenswrapper[4911]: I0606 09:24:11.554876 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-bundle" (OuterVolumeSpecName: "bundle") pod "7c2e9c04-daa4-4e63-8e6f-952d8118802b" (UID: "7c2e9c04-daa4-4e63-8e6f-952d8118802b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:24:11 crc kubenswrapper[4911]: I0606 09:24:11.561075 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c2e9c04-daa4-4e63-8e6f-952d8118802b-kube-api-access-kpntz" (OuterVolumeSpecName: "kube-api-access-kpntz") pod "7c2e9c04-daa4-4e63-8e6f-952d8118802b" (UID: "7c2e9c04-daa4-4e63-8e6f-952d8118802b"). InnerVolumeSpecName "kube-api-access-kpntz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:24:11 crc kubenswrapper[4911]: I0606 09:24:11.564699 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-util" (OuterVolumeSpecName: "util") pod "7c2e9c04-daa4-4e63-8e6f-952d8118802b" (UID: "7c2e9c04-daa4-4e63-8e6f-952d8118802b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:24:11 crc kubenswrapper[4911]: I0606 09:24:11.654882 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpntz\" (UniqueName: \"kubernetes.io/projected/7c2e9c04-daa4-4e63-8e6f-952d8118802b-kube-api-access-kpntz\") on node \"crc\" DevicePath \"\"" Jun 06 09:24:11 crc kubenswrapper[4911]: I0606 09:24:11.654960 4911 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:24:11 crc kubenswrapper[4911]: I0606 09:24:11.654983 4911 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7c2e9c04-daa4-4e63-8e6f-952d8118802b-util\") on node \"crc\" DevicePath \"\"" Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.003122 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" event={"ID":"7c2e9c04-daa4-4e63-8e6f-952d8118802b","Type":"ContainerDied","Data":"8b83a8a125b5b58e5f0c4bb9a87f20491345d44f8bbcd2f5650d364f11c4700a"} Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.003480 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b83a8a125b5b58e5f0c4bb9a87f20491345d44f8bbcd2f5650d364f11c4700a" Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.003173 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k" Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.007224 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mm5sw" event={"ID":"6e20b646-7248-4090-ba01-f4db926290dc","Type":"ContainerStarted","Data":"414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4"} Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.029000 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mm5sw" podStartSLOduration=3.236670817 podStartE2EDuration="6.028975087s" podCreationTimestamp="2025-06-06 09:24:06 +0000 UTC" firstStartedPulling="2025-06-06 09:24:08.977729108 +0000 UTC m=+660.253154651" lastFinishedPulling="2025-06-06 09:24:11.770033378 +0000 UTC m=+663.045458921" observedRunningTime="2025-06-06 09:24:12.024140422 +0000 UTC m=+663.299565995" watchObservedRunningTime="2025-06-06 09:24:12.028975087 +0000 UTC m=+663.304400630" Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.589858 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-gcsg4"] Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.590145 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-gcsg4" podUID="80c4a6cc-d9b2-4398-96d8-e790a7205dcc" containerName="container-00" containerID="cri-o://78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9" gracePeriod=2 Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.594902 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-gcsg4"] Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.691932 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-gcsg4" Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.868436 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-host\") pod \"80c4a6cc-d9b2-4398-96d8-e790a7205dcc\" (UID: \"80c4a6cc-d9b2-4398-96d8-e790a7205dcc\") " Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.868548 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-host" (OuterVolumeSpecName: "host") pod "80c4a6cc-d9b2-4398-96d8-e790a7205dcc" (UID: "80c4a6cc-d9b2-4398-96d8-e790a7205dcc"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.868631 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5p27\" (UniqueName: \"kubernetes.io/projected/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-kube-api-access-v5p27\") pod \"80c4a6cc-d9b2-4398-96d8-e790a7205dcc\" (UID: \"80c4a6cc-d9b2-4398-96d8-e790a7205dcc\") " Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.868902 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.873693 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-kube-api-access-v5p27" (OuterVolumeSpecName: "kube-api-access-v5p27") pod "80c4a6cc-d9b2-4398-96d8-e790a7205dcc" (UID: "80c4a6cc-d9b2-4398-96d8-e790a7205dcc"). InnerVolumeSpecName "kube-api-access-v5p27". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:24:12 crc kubenswrapper[4911]: I0606 09:24:12.970335 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5p27\" (UniqueName: \"kubernetes.io/projected/80c4a6cc-d9b2-4398-96d8-e790a7205dcc-kube-api-access-v5p27\") on node \"crc\" DevicePath \"\"" Jun 06 09:24:13 crc kubenswrapper[4911]: I0606 09:24:13.014122 4911 generic.go:334] "Generic (PLEG): container finished" podID="80c4a6cc-d9b2-4398-96d8-e790a7205dcc" containerID="78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9" exitCode=0 Jun 06 09:24:13 crc kubenswrapper[4911]: I0606 09:24:13.014164 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-gcsg4" Jun 06 09:24:13 crc kubenswrapper[4911]: I0606 09:24:13.014232 4911 scope.go:117] "RemoveContainer" containerID="78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9" Jun 06 09:24:13 crc kubenswrapper[4911]: I0606 09:24:13.030931 4911 scope.go:117] "RemoveContainer" containerID="78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9" Jun 06 09:24:13 crc kubenswrapper[4911]: E0606 09:24:13.031330 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9\": container with ID starting with 78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9 not found: ID does not exist" containerID="78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9" Jun 06 09:24:13 crc kubenswrapper[4911]: I0606 09:24:13.031374 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9"} err="failed to get container status \"78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9\": rpc error: code = NotFound desc = could not find container \"78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9\": container with ID starting with 78457c68d3a629651019d146918e6b6ebbed6d91460de965809ea9b7386e71c9 not found: ID does not exist" Jun 06 09:24:13 crc kubenswrapper[4911]: I0606 09:24:13.957473 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80c4a6cc-d9b2-4398-96d8-e790a7205dcc" path="/var/lib/kubelet/pods/80c4a6cc-d9b2-4398-96d8-e790a7205dcc/volumes" Jun 06 09:24:17 crc kubenswrapper[4911]: I0606 09:24:17.215889 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:17 crc kubenswrapper[4911]: I0606 09:24:17.216250 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:17 crc kubenswrapper[4911]: I0606 09:24:17.254552 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:18 crc kubenswrapper[4911]: I0606 09:24:18.086421 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:19 crc kubenswrapper[4911]: I0606 09:24:19.679544 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mm5sw"] Jun 06 09:24:20 crc kubenswrapper[4911]: I0606 09:24:20.061739 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mm5sw" podUID="6e20b646-7248-4090-ba01-f4db926290dc" containerName="registry-server" containerID="cri-o://414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4" gracePeriod=2 Jun 06 09:24:20 crc kubenswrapper[4911]: I0606 09:24:20.529709 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:20 crc kubenswrapper[4911]: I0606 09:24:20.684994 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grcgt\" (UniqueName: \"kubernetes.io/projected/6e20b646-7248-4090-ba01-f4db926290dc-kube-api-access-grcgt\") pod \"6e20b646-7248-4090-ba01-f4db926290dc\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " Jun 06 09:24:20 crc kubenswrapper[4911]: I0606 09:24:20.685165 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-catalog-content\") pod \"6e20b646-7248-4090-ba01-f4db926290dc\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " Jun 06 09:24:20 crc kubenswrapper[4911]: I0606 09:24:20.685235 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-utilities\") pod \"6e20b646-7248-4090-ba01-f4db926290dc\" (UID: \"6e20b646-7248-4090-ba01-f4db926290dc\") " Jun 06 09:24:20 crc kubenswrapper[4911]: I0606 09:24:20.686137 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-utilities" (OuterVolumeSpecName: "utilities") pod "6e20b646-7248-4090-ba01-f4db926290dc" (UID: "6e20b646-7248-4090-ba01-f4db926290dc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:24:20 crc kubenswrapper[4911]: I0606 09:24:20.691125 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e20b646-7248-4090-ba01-f4db926290dc-kube-api-access-grcgt" (OuterVolumeSpecName: "kube-api-access-grcgt") pod "6e20b646-7248-4090-ba01-f4db926290dc" (UID: "6e20b646-7248-4090-ba01-f4db926290dc"). InnerVolumeSpecName "kube-api-access-grcgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:24:20 crc kubenswrapper[4911]: I0606 09:24:20.788825 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grcgt\" (UniqueName: \"kubernetes.io/projected/6e20b646-7248-4090-ba01-f4db926290dc-kube-api-access-grcgt\") on node \"crc\" DevicePath \"\"" Jun 06 09:24:20 crc kubenswrapper[4911]: I0606 09:24:20.788876 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:24:20 crc kubenswrapper[4911]: I0606 09:24:20.941197 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6e20b646-7248-4090-ba01-f4db926290dc" (UID: "6e20b646-7248-4090-ba01-f4db926290dc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:24:20 crc kubenswrapper[4911]: I0606 09:24:20.991150 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e20b646-7248-4090-ba01-f4db926290dc-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.069220 4911 generic.go:334] "Generic (PLEG): container finished" podID="6e20b646-7248-4090-ba01-f4db926290dc" containerID="414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4" exitCode=0 Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.069269 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mm5sw" Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.069288 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mm5sw" event={"ID":"6e20b646-7248-4090-ba01-f4db926290dc","Type":"ContainerDied","Data":"414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4"} Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.069593 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mm5sw" event={"ID":"6e20b646-7248-4090-ba01-f4db926290dc","Type":"ContainerDied","Data":"dcd392c3916cc5edfd6b63829c3ae4948d9bd24fd767a606857e0fb22bb80eda"} Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.069617 4911 scope.go:117] "RemoveContainer" containerID="414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4" Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.088237 4911 scope.go:117] "RemoveContainer" containerID="651f014d2d61a40f82754c7e165971ab59b29fd55a7fb418bfcb348d3f7a6c83" Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.098124 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mm5sw"] Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.105632 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mm5sw"] Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.123452 4911 scope.go:117] "RemoveContainer" containerID="cec0909b7da69b167c2a03cd3e7351fce143cd648207508439649cc74181527b" Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.135383 4911 scope.go:117] "RemoveContainer" containerID="414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4" Jun 06 09:24:21 crc kubenswrapper[4911]: E0606 09:24:21.135833 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4\": container with ID starting with 414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4 not found: ID does not exist" containerID="414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4" Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.135868 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4"} err="failed to get container status \"414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4\": rpc error: code = NotFound desc = could not find container \"414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4\": container with ID starting with 414d0ba7a81bac7af50f4f56e98d678579be5c5ab415742bce00e67eeaf8a6c4 not found: ID does not exist" Jun 06 09:24:21 crc 
kubenswrapper[4911]: I0606 09:24:21.135895 4911 scope.go:117] "RemoveContainer" containerID="651f014d2d61a40f82754c7e165971ab59b29fd55a7fb418bfcb348d3f7a6c83" Jun 06 09:24:21 crc kubenswrapper[4911]: E0606 09:24:21.136170 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"651f014d2d61a40f82754c7e165971ab59b29fd55a7fb418bfcb348d3f7a6c83\": container with ID starting with 651f014d2d61a40f82754c7e165971ab59b29fd55a7fb418bfcb348d3f7a6c83 not found: ID does not exist" containerID="651f014d2d61a40f82754c7e165971ab59b29fd55a7fb418bfcb348d3f7a6c83" Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.136193 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"651f014d2d61a40f82754c7e165971ab59b29fd55a7fb418bfcb348d3f7a6c83"} err="failed to get container status \"651f014d2d61a40f82754c7e165971ab59b29fd55a7fb418bfcb348d3f7a6c83\": rpc error: code = NotFound desc = could not find container \"651f014d2d61a40f82754c7e165971ab59b29fd55a7fb418bfcb348d3f7a6c83\": container with ID starting with 651f014d2d61a40f82754c7e165971ab59b29fd55a7fb418bfcb348d3f7a6c83 not found: ID does not exist" Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.136211 4911 scope.go:117] "RemoveContainer" containerID="cec0909b7da69b167c2a03cd3e7351fce143cd648207508439649cc74181527b" Jun 06 09:24:21 crc kubenswrapper[4911]: E0606 09:24:21.136417 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cec0909b7da69b167c2a03cd3e7351fce143cd648207508439649cc74181527b\": container with ID starting with cec0909b7da69b167c2a03cd3e7351fce143cd648207508439649cc74181527b not found: ID does not exist" containerID="cec0909b7da69b167c2a03cd3e7351fce143cd648207508439649cc74181527b" Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.136440 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cec0909b7da69b167c2a03cd3e7351fce143cd648207508439649cc74181527b"} err="failed to get container status \"cec0909b7da69b167c2a03cd3e7351fce143cd648207508439649cc74181527b\": rpc error: code = NotFound desc = could not find container \"cec0909b7da69b167c2a03cd3e7351fce143cd648207508439649cc74181527b\": container with ID starting with cec0909b7da69b167c2a03cd3e7351fce143cd648207508439649cc74181527b not found: ID does not exist" Jun 06 09:24:21 crc kubenswrapper[4911]: I0606 09:24:21.956060 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e20b646-7248-4090-ba01-f4db926290dc" path="/var/lib/kubelet/pods/6e20b646-7248-4090-ba01-f4db926290dc/volumes" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.715981 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-58775bb567-66nbp"] Jun 06 09:24:22 crc kubenswrapper[4911]: E0606 09:24:22.716196 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80c4a6cc-d9b2-4398-96d8-e790a7205dcc" containerName="container-00" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.716209 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="80c4a6cc-d9b2-4398-96d8-e790a7205dcc" containerName="container-00" Jun 06 09:24:22 crc kubenswrapper[4911]: E0606 09:24:22.716221 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c2e9c04-daa4-4e63-8e6f-952d8118802b" containerName="pull" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.716228 4911 
state_mem.go:107] "Deleted CPUSet assignment" podUID="7c2e9c04-daa4-4e63-8e6f-952d8118802b" containerName="pull" Jun 06 09:24:22 crc kubenswrapper[4911]: E0606 09:24:22.716235 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e20b646-7248-4090-ba01-f4db926290dc" containerName="extract-utilities" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.716242 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e20b646-7248-4090-ba01-f4db926290dc" containerName="extract-utilities" Jun 06 09:24:22 crc kubenswrapper[4911]: E0606 09:24:22.716250 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c2e9c04-daa4-4e63-8e6f-952d8118802b" containerName="extract" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.716256 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c2e9c04-daa4-4e63-8e6f-952d8118802b" containerName="extract" Jun 06 09:24:22 crc kubenswrapper[4911]: E0606 09:24:22.716272 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e20b646-7248-4090-ba01-f4db926290dc" containerName="extract-content" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.716277 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e20b646-7248-4090-ba01-f4db926290dc" containerName="extract-content" Jun 06 09:24:22 crc kubenswrapper[4911]: E0606 09:24:22.716285 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e20b646-7248-4090-ba01-f4db926290dc" containerName="registry-server" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.716290 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e20b646-7248-4090-ba01-f4db926290dc" containerName="registry-server" Jun 06 09:24:22 crc kubenswrapper[4911]: E0606 09:24:22.716298 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c2e9c04-daa4-4e63-8e6f-952d8118802b" containerName="util" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.716304 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c2e9c04-daa4-4e63-8e6f-952d8118802b" containerName="util" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.716386 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e20b646-7248-4090-ba01-f4db926290dc" containerName="registry-server" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.716398 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="80c4a6cc-d9b2-4398-96d8-e790a7205dcc" containerName="container-00" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.716411 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c2e9c04-daa4-4e63-8e6f-952d8118802b" containerName="extract" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.716771 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.722546 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.722863 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.725267 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-ssr7f" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.725228 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.727204 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.755066 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-58775bb567-66nbp"] Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.813882 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4967770d-b296-42f3-855d-46b34c89a0e4-webhook-cert\") pod \"metallb-operator-controller-manager-58775bb567-66nbp\" (UID: \"4967770d-b296-42f3-855d-46b34c89a0e4\") " pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.813945 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4967770d-b296-42f3-855d-46b34c89a0e4-apiservice-cert\") pod \"metallb-operator-controller-manager-58775bb567-66nbp\" (UID: \"4967770d-b296-42f3-855d-46b34c89a0e4\") " pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.814241 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn54h\" (UniqueName: \"kubernetes.io/projected/4967770d-b296-42f3-855d-46b34c89a0e4-kube-api-access-mn54h\") pod \"metallb-operator-controller-manager-58775bb567-66nbp\" (UID: \"4967770d-b296-42f3-855d-46b34c89a0e4\") " pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.915220 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4967770d-b296-42f3-855d-46b34c89a0e4-webhook-cert\") pod \"metallb-operator-controller-manager-58775bb567-66nbp\" (UID: \"4967770d-b296-42f3-855d-46b34c89a0e4\") " pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.915277 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4967770d-b296-42f3-855d-46b34c89a0e4-apiservice-cert\") pod \"metallb-operator-controller-manager-58775bb567-66nbp\" (UID: \"4967770d-b296-42f3-855d-46b34c89a0e4\") " pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.915360 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn54h\" (UniqueName: \"kubernetes.io/projected/4967770d-b296-42f3-855d-46b34c89a0e4-kube-api-access-mn54h\") pod \"metallb-operator-controller-manager-58775bb567-66nbp\" (UID: \"4967770d-b296-42f3-855d-46b34c89a0e4\") " pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.921312 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-547795769d-5msxn"] Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.922215 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.924515 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.924707 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-7jbb9" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.925786 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4967770d-b296-42f3-855d-46b34c89a0e4-webhook-cert\") pod \"metallb-operator-controller-manager-58775bb567-66nbp\" (UID: \"4967770d-b296-42f3-855d-46b34c89a0e4\") " pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.926703 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.935879 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4967770d-b296-42f3-855d-46b34c89a0e4-apiservice-cert\") pod \"metallb-operator-controller-manager-58775bb567-66nbp\" (UID: \"4967770d-b296-42f3-855d-46b34c89a0e4\") " pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.936462 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn54h\" (UniqueName: \"kubernetes.io/projected/4967770d-b296-42f3-855d-46b34c89a0e4-kube-api-access-mn54h\") pod \"metallb-operator-controller-manager-58775bb567-66nbp\" (UID: \"4967770d-b296-42f3-855d-46b34c89a0e4\") " pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:22 crc kubenswrapper[4911]: I0606 09:24:22.940394 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-547795769d-5msxn"] Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.016576 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9b6cf40e-e3a1-409d-a796-8a60840c0a07-webhook-cert\") pod \"metallb-operator-webhook-server-547795769d-5msxn\" (UID: \"9b6cf40e-e3a1-409d-a796-8a60840c0a07\") " pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.016749 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbhn6\" (UniqueName: 
\"kubernetes.io/projected/9b6cf40e-e3a1-409d-a796-8a60840c0a07-kube-api-access-lbhn6\") pod \"metallb-operator-webhook-server-547795769d-5msxn\" (UID: \"9b6cf40e-e3a1-409d-a796-8a60840c0a07\") " pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.016821 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9b6cf40e-e3a1-409d-a796-8a60840c0a07-apiservice-cert\") pod \"metallb-operator-webhook-server-547795769d-5msxn\" (UID: \"9b6cf40e-e3a1-409d-a796-8a60840c0a07\") " pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.032961 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.117881 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9b6cf40e-e3a1-409d-a796-8a60840c0a07-apiservice-cert\") pod \"metallb-operator-webhook-server-547795769d-5msxn\" (UID: \"9b6cf40e-e3a1-409d-a796-8a60840c0a07\") " pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.117971 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9b6cf40e-e3a1-409d-a796-8a60840c0a07-webhook-cert\") pod \"metallb-operator-webhook-server-547795769d-5msxn\" (UID: \"9b6cf40e-e3a1-409d-a796-8a60840c0a07\") " pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.118021 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbhn6\" (UniqueName: \"kubernetes.io/projected/9b6cf40e-e3a1-409d-a796-8a60840c0a07-kube-api-access-lbhn6\") pod \"metallb-operator-webhook-server-547795769d-5msxn\" (UID: \"9b6cf40e-e3a1-409d-a796-8a60840c0a07\") " pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.121679 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9b6cf40e-e3a1-409d-a796-8a60840c0a07-apiservice-cert\") pod \"metallb-operator-webhook-server-547795769d-5msxn\" (UID: \"9b6cf40e-e3a1-409d-a796-8a60840c0a07\") " pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.124238 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9b6cf40e-e3a1-409d-a796-8a60840c0a07-webhook-cert\") pod \"metallb-operator-webhook-server-547795769d-5msxn\" (UID: \"9b6cf40e-e3a1-409d-a796-8a60840c0a07\") " pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.137318 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbhn6\" (UniqueName: \"kubernetes.io/projected/9b6cf40e-e3a1-409d-a796-8a60840c0a07-kube-api-access-lbhn6\") pod \"metallb-operator-webhook-server-547795769d-5msxn\" (UID: \"9b6cf40e-e3a1-409d-a796-8a60840c0a07\") " pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:23 crc 
kubenswrapper[4911]: I0606 09:24:23.279020 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.470811 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-58775bb567-66nbp"] Jun 06 09:24:23 crc kubenswrapper[4911]: W0606 09:24:23.474593 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4967770d_b296_42f3_855d_46b34c89a0e4.slice/crio-97815cc7adc4c88df5ea36d2a8ff190761618d05c0c759efcea8ecb3d5d4220e WatchSource:0}: Error finding container 97815cc7adc4c88df5ea36d2a8ff190761618d05c0c759efcea8ecb3d5d4220e: Status 404 returned error can't find the container with id 97815cc7adc4c88df5ea36d2a8ff190761618d05c0c759efcea8ecb3d5d4220e Jun 06 09:24:23 crc kubenswrapper[4911]: I0606 09:24:23.718392 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-547795769d-5msxn"] Jun 06 09:24:23 crc kubenswrapper[4911]: W0606 09:24:23.727626 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b6cf40e_e3a1_409d_a796_8a60840c0a07.slice/crio-828e16964719ed14a07bb54476fee1951222d909697e1d7d3e110fa3fed8ed17 WatchSource:0}: Error finding container 828e16964719ed14a07bb54476fee1951222d909697e1d7d3e110fa3fed8ed17: Status 404 returned error can't find the container with id 828e16964719ed14a07bb54476fee1951222d909697e1d7d3e110fa3fed8ed17 Jun 06 09:24:24 crc kubenswrapper[4911]: I0606 09:24:24.091034 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" event={"ID":"4967770d-b296-42f3-855d-46b34c89a0e4","Type":"ContainerStarted","Data":"97815cc7adc4c88df5ea36d2a8ff190761618d05c0c759efcea8ecb3d5d4220e"} Jun 06 09:24:24 crc kubenswrapper[4911]: I0606 09:24:24.092250 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" event={"ID":"9b6cf40e-e3a1-409d-a796-8a60840c0a07","Type":"ContainerStarted","Data":"828e16964719ed14a07bb54476fee1951222d909697e1d7d3e110fa3fed8ed17"} Jun 06 09:24:28 crc kubenswrapper[4911]: I0606 09:24:28.128896 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" event={"ID":"9b6cf40e-e3a1-409d-a796-8a60840c0a07","Type":"ContainerStarted","Data":"e2d6ed4fb30b0a43e8bafb5e833ac4f52490b8bab63d2bc80e6d32db43c91bb4"} Jun 06 09:24:28 crc kubenswrapper[4911]: I0606 09:24:28.129560 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:28 crc kubenswrapper[4911]: I0606 09:24:28.130890 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" event={"ID":"4967770d-b296-42f3-855d-46b34c89a0e4","Type":"ContainerStarted","Data":"201e2f5b8cb3d305349ba02fa90ebb0cddff7c4eedf73d6f57f64b4baa9f379f"} Jun 06 09:24:28 crc kubenswrapper[4911]: I0606 09:24:28.160288 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" podStartSLOduration=2.063185706 podStartE2EDuration="6.160253912s" podCreationTimestamp="2025-06-06 09:24:22 +0000 UTC" 
firstStartedPulling="2025-06-06 09:24:23.730889254 +0000 UTC m=+675.006314807" lastFinishedPulling="2025-06-06 09:24:27.82795747 +0000 UTC m=+679.103383013" observedRunningTime="2025-06-06 09:24:28.153916769 +0000 UTC m=+679.429342312" watchObservedRunningTime="2025-06-06 09:24:28.160253912 +0000 UTC m=+679.435679455" Jun 06 09:24:29 crc kubenswrapper[4911]: I0606 09:24:29.138723 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:24:29 crc kubenswrapper[4911]: I0606 09:24:29.163079 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" podStartSLOduration=2.834545263 podStartE2EDuration="7.163051139s" podCreationTimestamp="2025-06-06 09:24:22 +0000 UTC" firstStartedPulling="2025-06-06 09:24:23.483199795 +0000 UTC m=+674.758625338" lastFinishedPulling="2025-06-06 09:24:27.811705671 +0000 UTC m=+679.087131214" observedRunningTime="2025-06-06 09:24:29.162203207 +0000 UTC m=+680.437628770" watchObservedRunningTime="2025-06-06 09:24:29.163051139 +0000 UTC m=+680.438476692" Jun 06 09:24:43 crc kubenswrapper[4911]: I0606 09:24:43.284136 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-547795769d-5msxn" Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.465003 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rplh2"] Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.466788 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.485952 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rplh2"] Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.645925 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8nf8\" (UniqueName: \"kubernetes.io/projected/6de094f2-4a0c-4e9d-bf92-9668110af217-kube-api-access-f8nf8\") pod \"redhat-marketplace-rplh2\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.646033 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-utilities\") pod \"redhat-marketplace-rplh2\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.646123 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-catalog-content\") pod \"redhat-marketplace-rplh2\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.747443 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-utilities\") pod \"redhat-marketplace-rplh2\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 
09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.747544 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-catalog-content\") pod \"redhat-marketplace-rplh2\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.747582 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8nf8\" (UniqueName: \"kubernetes.io/projected/6de094f2-4a0c-4e9d-bf92-9668110af217-kube-api-access-f8nf8\") pod \"redhat-marketplace-rplh2\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.748030 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-utilities\") pod \"redhat-marketplace-rplh2\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.748079 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-catalog-content\") pod \"redhat-marketplace-rplh2\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.773120 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8nf8\" (UniqueName: \"kubernetes.io/projected/6de094f2-4a0c-4e9d-bf92-9668110af217-kube-api-access-f8nf8\") pod \"redhat-marketplace-rplh2\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:47 crc kubenswrapper[4911]: I0606 09:24:47.786326 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:48 crc kubenswrapper[4911]: I0606 09:24:48.183674 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rplh2"] Jun 06 09:24:48 crc kubenswrapper[4911]: I0606 09:24:48.245277 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rplh2" event={"ID":"6de094f2-4a0c-4e9d-bf92-9668110af217","Type":"ContainerStarted","Data":"8dfa8ac2e5b6d0cd89fed03d506f7e3baf22370430765a3cf026e2cb0b1df9c9"} Jun 06 09:24:49 crc kubenswrapper[4911]: I0606 09:24:49.251959 4911 generic.go:334] "Generic (PLEG): container finished" podID="6de094f2-4a0c-4e9d-bf92-9668110af217" containerID="bc760ebe7fbe0fd139e481e561ac9b552bc9c2ea4ab486fe24ca7c52b044ebfd" exitCode=0 Jun 06 09:24:49 crc kubenswrapper[4911]: I0606 09:24:49.252003 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rplh2" event={"ID":"6de094f2-4a0c-4e9d-bf92-9668110af217","Type":"ContainerDied","Data":"bc760ebe7fbe0fd139e481e561ac9b552bc9c2ea4ab486fe24ca7c52b044ebfd"} Jun 06 09:24:50 crc kubenswrapper[4911]: I0606 09:24:50.258452 4911 generic.go:334] "Generic (PLEG): container finished" podID="6de094f2-4a0c-4e9d-bf92-9668110af217" containerID="911451b5731683f36f870bcf343bf3d8ab01fe24a94b83a2e937a4cd72931cf7" exitCode=0 Jun 06 09:24:50 crc kubenswrapper[4911]: I0606 09:24:50.258532 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rplh2" event={"ID":"6de094f2-4a0c-4e9d-bf92-9668110af217","Type":"ContainerDied","Data":"911451b5731683f36f870bcf343bf3d8ab01fe24a94b83a2e937a4cd72931cf7"} Jun 06 09:24:51 crc kubenswrapper[4911]: I0606 09:24:51.266971 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rplh2" event={"ID":"6de094f2-4a0c-4e9d-bf92-9668110af217","Type":"ContainerStarted","Data":"b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91"} Jun 06 09:24:51 crc kubenswrapper[4911]: I0606 09:24:51.282838 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rplh2" podStartSLOduration=2.8587448479999997 podStartE2EDuration="4.282816752s" podCreationTimestamp="2025-06-06 09:24:47 +0000 UTC" firstStartedPulling="2025-06-06 09:24:49.25371086 +0000 UTC m=+700.529136403" lastFinishedPulling="2025-06-06 09:24:50.677782764 +0000 UTC m=+701.953208307" observedRunningTime="2025-06-06 09:24:51.282233947 +0000 UTC m=+702.557659520" watchObservedRunningTime="2025-06-06 09:24:51.282816752 +0000 UTC m=+702.558242315" Jun 06 09:24:54 crc kubenswrapper[4911]: I0606 09:24:54.300349 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:24:54 crc kubenswrapper[4911]: I0606 09:24:54.300687 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:24:57 crc kubenswrapper[4911]: I0606 09:24:57.787366 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:57 crc kubenswrapper[4911]: I0606 09:24:57.787733 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:57 crc kubenswrapper[4911]: I0606 09:24:57.827973 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:58 crc kubenswrapper[4911]: I0606 09:24:58.345147 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:24:58 crc kubenswrapper[4911]: I0606 09:24:58.387488 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rplh2"] Jun 06 09:25:00 crc kubenswrapper[4911]: I0606 09:25:00.325945 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rplh2" podUID="6de094f2-4a0c-4e9d-bf92-9668110af217" containerName="registry-server" containerID="cri-o://b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91" gracePeriod=2 Jun 06 09:25:00 crc kubenswrapper[4911]: I0606 09:25:00.727298 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:25:00 crc kubenswrapper[4911]: I0606 09:25:00.923827 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8nf8\" (UniqueName: \"kubernetes.io/projected/6de094f2-4a0c-4e9d-bf92-9668110af217-kube-api-access-f8nf8\") pod \"6de094f2-4a0c-4e9d-bf92-9668110af217\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " Jun 06 09:25:00 crc kubenswrapper[4911]: I0606 09:25:00.923898 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-catalog-content\") pod \"6de094f2-4a0c-4e9d-bf92-9668110af217\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " Jun 06 09:25:00 crc kubenswrapper[4911]: I0606 09:25:00.923933 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-utilities\") pod \"6de094f2-4a0c-4e9d-bf92-9668110af217\" (UID: \"6de094f2-4a0c-4e9d-bf92-9668110af217\") " Jun 06 09:25:00 crc kubenswrapper[4911]: I0606 09:25:00.925041 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-utilities" (OuterVolumeSpecName: "utilities") pod "6de094f2-4a0c-4e9d-bf92-9668110af217" (UID: "6de094f2-4a0c-4e9d-bf92-9668110af217"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:25:00 crc kubenswrapper[4911]: I0606 09:25:00.931714 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6de094f2-4a0c-4e9d-bf92-9668110af217-kube-api-access-f8nf8" (OuterVolumeSpecName: "kube-api-access-f8nf8") pod "6de094f2-4a0c-4e9d-bf92-9668110af217" (UID: "6de094f2-4a0c-4e9d-bf92-9668110af217"). InnerVolumeSpecName "kube-api-access-f8nf8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:25:00 crc kubenswrapper[4911]: I0606 09:25:00.933681 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6de094f2-4a0c-4e9d-bf92-9668110af217" (UID: "6de094f2-4a0c-4e9d-bf92-9668110af217"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.025128 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.025167 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6de094f2-4a0c-4e9d-bf92-9668110af217-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.025177 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8nf8\" (UniqueName: \"kubernetes.io/projected/6de094f2-4a0c-4e9d-bf92-9668110af217-kube-api-access-f8nf8\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.333346 4911 generic.go:334] "Generic (PLEG): container finished" podID="6de094f2-4a0c-4e9d-bf92-9668110af217" containerID="b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91" exitCode=0 Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.333389 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rplh2" event={"ID":"6de094f2-4a0c-4e9d-bf92-9668110af217","Type":"ContainerDied","Data":"b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91"} Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.333399 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rplh2" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.333424 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rplh2" event={"ID":"6de094f2-4a0c-4e9d-bf92-9668110af217","Type":"ContainerDied","Data":"8dfa8ac2e5b6d0cd89fed03d506f7e3baf22370430765a3cf026e2cb0b1df9c9"} Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.333454 4911 scope.go:117] "RemoveContainer" containerID="b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.355573 4911 scope.go:117] "RemoveContainer" containerID="911451b5731683f36f870bcf343bf3d8ab01fe24a94b83a2e937a4cd72931cf7" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.361518 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rplh2"] Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.364790 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rplh2"] Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.377913 4911 scope.go:117] "RemoveContainer" containerID="bc760ebe7fbe0fd139e481e561ac9b552bc9c2ea4ab486fe24ca7c52b044ebfd" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.393529 4911 scope.go:117] "RemoveContainer" containerID="b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91" Jun 06 09:25:01 crc kubenswrapper[4911]: E0606 09:25:01.394053 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91\": container with ID starting with b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91 not found: ID does not exist" containerID="b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.394106 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91"} err="failed to get container status \"b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91\": rpc error: code = NotFound desc = could not find container \"b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91\": container with ID starting with b5563d2f48872057a13a0176e1be6d439b8b2161ef21471e9fbe0eccfbf9ba91 not found: ID does not exist" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.394136 4911 scope.go:117] "RemoveContainer" containerID="911451b5731683f36f870bcf343bf3d8ab01fe24a94b83a2e937a4cd72931cf7" Jun 06 09:25:01 crc kubenswrapper[4911]: E0606 09:25:01.394432 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"911451b5731683f36f870bcf343bf3d8ab01fe24a94b83a2e937a4cd72931cf7\": container with ID starting with 911451b5731683f36f870bcf343bf3d8ab01fe24a94b83a2e937a4cd72931cf7 not found: ID does not exist" containerID="911451b5731683f36f870bcf343bf3d8ab01fe24a94b83a2e937a4cd72931cf7" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.394449 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"911451b5731683f36f870bcf343bf3d8ab01fe24a94b83a2e937a4cd72931cf7"} err="failed to get container status \"911451b5731683f36f870bcf343bf3d8ab01fe24a94b83a2e937a4cd72931cf7\": rpc error: code = NotFound desc = could not find 
container \"911451b5731683f36f870bcf343bf3d8ab01fe24a94b83a2e937a4cd72931cf7\": container with ID starting with 911451b5731683f36f870bcf343bf3d8ab01fe24a94b83a2e937a4cd72931cf7 not found: ID does not exist" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.394462 4911 scope.go:117] "RemoveContainer" containerID="bc760ebe7fbe0fd139e481e561ac9b552bc9c2ea4ab486fe24ca7c52b044ebfd" Jun 06 09:25:01 crc kubenswrapper[4911]: E0606 09:25:01.394653 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc760ebe7fbe0fd139e481e561ac9b552bc9c2ea4ab486fe24ca7c52b044ebfd\": container with ID starting with bc760ebe7fbe0fd139e481e561ac9b552bc9c2ea4ab486fe24ca7c52b044ebfd not found: ID does not exist" containerID="bc760ebe7fbe0fd139e481e561ac9b552bc9c2ea4ab486fe24ca7c52b044ebfd" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.394671 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc760ebe7fbe0fd139e481e561ac9b552bc9c2ea4ab486fe24ca7c52b044ebfd"} err="failed to get container status \"bc760ebe7fbe0fd139e481e561ac9b552bc9c2ea4ab486fe24ca7c52b044ebfd\": rpc error: code = NotFound desc = could not find container \"bc760ebe7fbe0fd139e481e561ac9b552bc9c2ea4ab486fe24ca7c52b044ebfd\": container with ID starting with bc760ebe7fbe0fd139e481e561ac9b552bc9c2ea4ab486fe24ca7c52b044ebfd not found: ID does not exist" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.954843 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6de094f2-4a0c-4e9d-bf92-9668110af217" path="/var/lib/kubelet/pods/6de094f2-4a0c-4e9d-bf92-9668110af217/volumes" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.962719 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-qjgbt"] Jun 06 09:25:01 crc kubenswrapper[4911]: E0606 09:25:01.962968 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6de094f2-4a0c-4e9d-bf92-9668110af217" containerName="registry-server" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.962980 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6de094f2-4a0c-4e9d-bf92-9668110af217" containerName="registry-server" Jun 06 09:25:01 crc kubenswrapper[4911]: E0606 09:25:01.962996 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6de094f2-4a0c-4e9d-bf92-9668110af217" containerName="extract-utilities" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.963002 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6de094f2-4a0c-4e9d-bf92-9668110af217" containerName="extract-utilities" Jun 06 09:25:01 crc kubenswrapper[4911]: E0606 09:25:01.963014 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6de094f2-4a0c-4e9d-bf92-9668110af217" containerName="extract-content" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.963021 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6de094f2-4a0c-4e9d-bf92-9668110af217" containerName="extract-content" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.963130 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6de094f2-4a0c-4e9d-bf92-9668110af217" containerName="registry-server" Jun 06 09:25:01 crc kubenswrapper[4911]: I0606 09:25:01.963565 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-qjgbt" Jun 06 09:25:02 crc kubenswrapper[4911]: I0606 09:25:02.036311 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d5dded3-20ed-4ade-98d9-e8c359627137-host\") pod \"crc-debug-qjgbt\" (UID: \"1d5dded3-20ed-4ade-98d9-e8c359627137\") " pod="openstack/crc-debug-qjgbt" Jun 06 09:25:02 crc kubenswrapper[4911]: I0606 09:25:02.036448 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnbvn\" (UniqueName: \"kubernetes.io/projected/1d5dded3-20ed-4ade-98d9-e8c359627137-kube-api-access-tnbvn\") pod \"crc-debug-qjgbt\" (UID: \"1d5dded3-20ed-4ade-98d9-e8c359627137\") " pod="openstack/crc-debug-qjgbt" Jun 06 09:25:02 crc kubenswrapper[4911]: I0606 09:25:02.137716 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d5dded3-20ed-4ade-98d9-e8c359627137-host\") pod \"crc-debug-qjgbt\" (UID: \"1d5dded3-20ed-4ade-98d9-e8c359627137\") " pod="openstack/crc-debug-qjgbt" Jun 06 09:25:02 crc kubenswrapper[4911]: I0606 09:25:02.137829 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnbvn\" (UniqueName: \"kubernetes.io/projected/1d5dded3-20ed-4ade-98d9-e8c359627137-kube-api-access-tnbvn\") pod \"crc-debug-qjgbt\" (UID: \"1d5dded3-20ed-4ade-98d9-e8c359627137\") " pod="openstack/crc-debug-qjgbt" Jun 06 09:25:02 crc kubenswrapper[4911]: I0606 09:25:02.137836 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d5dded3-20ed-4ade-98d9-e8c359627137-host\") pod \"crc-debug-qjgbt\" (UID: \"1d5dded3-20ed-4ade-98d9-e8c359627137\") " pod="openstack/crc-debug-qjgbt" Jun 06 09:25:02 crc kubenswrapper[4911]: I0606 09:25:02.154419 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnbvn\" (UniqueName: \"kubernetes.io/projected/1d5dded3-20ed-4ade-98d9-e8c359627137-kube-api-access-tnbvn\") pod \"crc-debug-qjgbt\" (UID: \"1d5dded3-20ed-4ade-98d9-e8c359627137\") " pod="openstack/crc-debug-qjgbt" Jun 06 09:25:02 crc kubenswrapper[4911]: I0606 09:25:02.292082 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-qjgbt" Jun 06 09:25:02 crc kubenswrapper[4911]: I0606 09:25:02.339116 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-qjgbt" event={"ID":"1d5dded3-20ed-4ade-98d9-e8c359627137","Type":"ContainerStarted","Data":"4d7e1fe1f883468f115624e024ac5ab5ee0027e82251e6de008b15d223f846de"} Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.035630 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-58775bb567-66nbp" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.347881 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-qjgbt" event={"ID":"1d5dded3-20ed-4ade-98d9-e8c359627137","Type":"ContainerStarted","Data":"a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf"} Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.367365 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-qjgbt" podStartSLOduration=2.367334038 podStartE2EDuration="2.367334038s" podCreationTimestamp="2025-06-06 09:25:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:25:03.364618208 +0000 UTC m=+714.640043761" watchObservedRunningTime="2025-06-06 09:25:03.367334038 +0000 UTC m=+714.642759591" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.805320 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-mxfmj"] Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.808678 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.813589 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.813763 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-w9q7f" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.813870 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.813965 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk"] Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.815152 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.816406 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.825137 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk"] Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.862656 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-reloader\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.862771 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-metrics\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.862804 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6c6gq\" (UniqueName: \"kubernetes.io/projected/aded57b8-2cbf-41c5-ada2-e7768d87ab83-kube-api-access-6c6gq\") pod \"frr-k8s-webhook-server-8457d999f9-ghvnk\" (UID: \"aded57b8-2cbf-41c5-ada2-e7768d87ab83\") " pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.862863 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx9j2\" (UniqueName: \"kubernetes.io/projected/1e336f61-fb9b-45f7-ba82-f09f0d38d592-kube-api-access-sx9j2\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.862889 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1e336f61-fb9b-45f7-ba82-f09f0d38d592-metrics-certs\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.862913 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-frr-conf\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.862940 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/1e336f61-fb9b-45f7-ba82-f09f0d38d592-frr-startup\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.863047 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aded57b8-2cbf-41c5-ada2-e7768d87ab83-cert\") pod \"frr-k8s-webhook-server-8457d999f9-ghvnk\" (UID: \"aded57b8-2cbf-41c5-ada2-e7768d87ab83\") " 
pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.863104 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-frr-sockets\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.874852 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-bvtzh"] Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.875968 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-bvtzh" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.877893 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.878143 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.878217 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.878953 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-fz8qc" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.903825 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5f968f88cc-s8ttm"] Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.904880 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.909258 4911 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.912390 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5f968f88cc-s8ttm"] Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.963606 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/5e555a27-714d-4b49-8e05-1cab47ab1a16-metallb-excludel2\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.963658 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cl5m\" (UniqueName: \"kubernetes.io/projected/5e555a27-714d-4b49-8e05-1cab47ab1a16-kube-api-access-5cl5m\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.963688 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-metrics\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.963712 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6c6gq\" (UniqueName: \"kubernetes.io/projected/aded57b8-2cbf-41c5-ada2-e7768d87ab83-kube-api-access-6c6gq\") pod 
\"frr-k8s-webhook-server-8457d999f9-ghvnk\" (UID: \"aded57b8-2cbf-41c5-ada2-e7768d87ab83\") " pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.963732 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c811786-a64d-49bb-93d8-88f5846c7462-metrics-certs\") pod \"controller-5f968f88cc-s8ttm\" (UID: \"6c811786-a64d-49bb-93d8-88f5846c7462\") " pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.963746 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c811786-a64d-49bb-93d8-88f5846c7462-cert\") pod \"controller-5f968f88cc-s8ttm\" (UID: \"6c811786-a64d-49bb-93d8-88f5846c7462\") " pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.963769 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx9j2\" (UniqueName: \"kubernetes.io/projected/1e336f61-fb9b-45f7-ba82-f09f0d38d592-kube-api-access-sx9j2\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.963788 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1e336f61-fb9b-45f7-ba82-f09f0d38d592-metrics-certs\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.963807 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-frr-conf\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: E0606 09:25:03.963912 4911 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Jun 06 09:25:03 crc kubenswrapper[4911]: E0606 09:25:03.963968 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1e336f61-fb9b-45f7-ba82-f09f0d38d592-metrics-certs podName:1e336f61-fb9b-45f7-ba82-f09f0d38d592 nodeName:}" failed. No retries permitted until 2025-06-06 09:25:04.463945992 +0000 UTC m=+715.739371545 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1e336f61-fb9b-45f7-ba82-f09f0d38d592-metrics-certs") pod "frr-k8s-mxfmj" (UID: "1e336f61-fb9b-45f7-ba82-f09f0d38d592") : secret "frr-k8s-certs-secret" not found Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.964111 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-metrics\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.964194 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/1e336f61-fb9b-45f7-ba82-f09f0d38d592-frr-startup\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.964220 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-metrics-certs\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.965057 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-memberlist\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.965162 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aded57b8-2cbf-41c5-ada2-e7768d87ab83-cert\") pod \"frr-k8s-webhook-server-8457d999f9-ghvnk\" (UID: \"aded57b8-2cbf-41c5-ada2-e7768d87ab83\") " pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.965188 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zxw2\" (UniqueName: \"kubernetes.io/projected/6c811786-a64d-49bb-93d8-88f5846c7462-kube-api-access-5zxw2\") pod \"controller-5f968f88cc-s8ttm\" (UID: \"6c811786-a64d-49bb-93d8-88f5846c7462\") " pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.965213 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-frr-sockets\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.964992 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/1e336f61-fb9b-45f7-ba82-f09f0d38d592-frr-startup\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.964291 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-frr-conf\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " 
pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.965318 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-reloader\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.965522 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-frr-sockets\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.965626 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/1e336f61-fb9b-45f7-ba82-f09f0d38d592-reloader\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.979687 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx9j2\" (UniqueName: \"kubernetes.io/projected/1e336f61-fb9b-45f7-ba82-f09f0d38d592-kube-api-access-sx9j2\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.979866 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aded57b8-2cbf-41c5-ada2-e7768d87ab83-cert\") pod \"frr-k8s-webhook-server-8457d999f9-ghvnk\" (UID: \"aded57b8-2cbf-41c5-ada2-e7768d87ab83\") " pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" Jun 06 09:25:03 crc kubenswrapper[4911]: I0606 09:25:03.981661 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6c6gq\" (UniqueName: \"kubernetes.io/projected/aded57b8-2cbf-41c5-ada2-e7768d87ab83-kube-api-access-6c6gq\") pod \"frr-k8s-webhook-server-8457d999f9-ghvnk\" (UID: \"aded57b8-2cbf-41c5-ada2-e7768d87ab83\") " pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.067838 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/5e555a27-714d-4b49-8e05-1cab47ab1a16-metallb-excludel2\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.067905 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cl5m\" (UniqueName: \"kubernetes.io/projected/5e555a27-714d-4b49-8e05-1cab47ab1a16-kube-api-access-5cl5m\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.067935 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c811786-a64d-49bb-93d8-88f5846c7462-metrics-certs\") pod \"controller-5f968f88cc-s8ttm\" (UID: \"6c811786-a64d-49bb-93d8-88f5846c7462\") " pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.067962 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" 
(UniqueName: \"kubernetes.io/secret/6c811786-a64d-49bb-93d8-88f5846c7462-cert\") pod \"controller-5f968f88cc-s8ttm\" (UID: \"6c811786-a64d-49bb-93d8-88f5846c7462\") " pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.068020 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-metrics-certs\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.068055 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-memberlist\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.068078 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zxw2\" (UniqueName: \"kubernetes.io/projected/6c811786-a64d-49bb-93d8-88f5846c7462-kube-api-access-5zxw2\") pod \"controller-5f968f88cc-s8ttm\" (UID: \"6c811786-a64d-49bb-93d8-88f5846c7462\") " pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.068721 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/5e555a27-714d-4b49-8e05-1cab47ab1a16-metallb-excludel2\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:04 crc kubenswrapper[4911]: E0606 09:25:04.069795 4911 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jun 06 09:25:04 crc kubenswrapper[4911]: E0606 09:25:04.069845 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-memberlist podName:5e555a27-714d-4b49-8e05-1cab47ab1a16 nodeName:}" failed. No retries permitted until 2025-06-06 09:25:04.56983121 +0000 UTC m=+715.845256753 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-memberlist") pod "speaker-bvtzh" (UID: "5e555a27-714d-4b49-8e05-1cab47ab1a16") : secret "metallb-memberlist" not found Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.071773 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6c811786-a64d-49bb-93d8-88f5846c7462-cert\") pod \"controller-5f968f88cc-s8ttm\" (UID: \"6c811786-a64d-49bb-93d8-88f5846c7462\") " pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.071991 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-metrics-certs\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.073396 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c811786-a64d-49bb-93d8-88f5846c7462-metrics-certs\") pod \"controller-5f968f88cc-s8ttm\" (UID: \"6c811786-a64d-49bb-93d8-88f5846c7462\") " pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.086869 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zxw2\" (UniqueName: \"kubernetes.io/projected/6c811786-a64d-49bb-93d8-88f5846c7462-kube-api-access-5zxw2\") pod \"controller-5f968f88cc-s8ttm\" (UID: \"6c811786-a64d-49bb-93d8-88f5846c7462\") " pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.092472 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cl5m\" (UniqueName: \"kubernetes.io/projected/5e555a27-714d-4b49-8e05-1cab47ab1a16-kube-api-access-5cl5m\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.137974 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.218391 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.485793 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1e336f61-fb9b-45f7-ba82-f09f0d38d592-metrics-certs\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.498682 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1e336f61-fb9b-45f7-ba82-f09f0d38d592-metrics-certs\") pod \"frr-k8s-mxfmj\" (UID: \"1e336f61-fb9b-45f7-ba82-f09f0d38d592\") " pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.556982 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk"] Jun 06 09:25:04 crc kubenswrapper[4911]: W0606 09:25:04.561399 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaded57b8_2cbf_41c5_ada2_e7768d87ab83.slice/crio-216fabf2b4a631635cbe55bb5684ad0555d561a22a0ebb732b14443f4374e961 WatchSource:0}: Error finding container 216fabf2b4a631635cbe55bb5684ad0555d561a22a0ebb732b14443f4374e961: Status 404 returned error can't find the container with id 216fabf2b4a631635cbe55bb5684ad0555d561a22a0ebb732b14443f4374e961 Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.587377 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-memberlist\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:04 crc kubenswrapper[4911]: E0606 09:25:04.587662 4911 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jun 06 09:25:04 crc kubenswrapper[4911]: E0606 09:25:04.587746 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-memberlist podName:5e555a27-714d-4b49-8e05-1cab47ab1a16 nodeName:}" failed. No retries permitted until 2025-06-06 09:25:05.587726462 +0000 UTC m=+716.863152005 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-memberlist") pod "speaker-bvtzh" (UID: "5e555a27-714d-4b49-8e05-1cab47ab1a16") : secret "metallb-memberlist" not found Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.608942 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5f968f88cc-s8ttm"] Jun 06 09:25:04 crc kubenswrapper[4911]: W0606 09:25:04.620803 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c811786_a64d_49bb_93d8_88f5846c7462.slice/crio-e2c7ce4109d8470818321c29e47c70eae223c27cc9b95fbe4ee2627bd50d651b WatchSource:0}: Error finding container e2c7ce4109d8470818321c29e47c70eae223c27cc9b95fbe4ee2627bd50d651b: Status 404 returned error can't find the container with id e2c7ce4109d8470818321c29e47c70eae223c27cc9b95fbe4ee2627bd50d651b Jun 06 09:25:04 crc kubenswrapper[4911]: I0606 09:25:04.728177 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:05 crc kubenswrapper[4911]: I0606 09:25:05.360038 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mxfmj" event={"ID":"1e336f61-fb9b-45f7-ba82-f09f0d38d592","Type":"ContainerStarted","Data":"5259217161fcadb59e0449d611eba6cdddd4b916604a2732f3179f8c70f71180"} Jun 06 09:25:05 crc kubenswrapper[4911]: I0606 09:25:05.361485 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5f968f88cc-s8ttm" event={"ID":"6c811786-a64d-49bb-93d8-88f5846c7462","Type":"ContainerStarted","Data":"76b2dc9cd663ef86d1e6b68e3cbf19787861a4a8df34619db80ffd533a7f49a1"} Jun 06 09:25:05 crc kubenswrapper[4911]: I0606 09:25:05.361519 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5f968f88cc-s8ttm" event={"ID":"6c811786-a64d-49bb-93d8-88f5846c7462","Type":"ContainerStarted","Data":"e2c7ce4109d8470818321c29e47c70eae223c27cc9b95fbe4ee2627bd50d651b"} Jun 06 09:25:05 crc kubenswrapper[4911]: I0606 09:25:05.362969 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" event={"ID":"aded57b8-2cbf-41c5-ada2-e7768d87ab83","Type":"ContainerStarted","Data":"216fabf2b4a631635cbe55bb5684ad0555d561a22a0ebb732b14443f4374e961"} Jun 06 09:25:05 crc kubenswrapper[4911]: I0606 09:25:05.609777 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-memberlist\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:05 crc kubenswrapper[4911]: I0606 09:25:05.615262 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/5e555a27-714d-4b49-8e05-1cab47ab1a16-memberlist\") pod \"speaker-bvtzh\" (UID: \"5e555a27-714d-4b49-8e05-1cab47ab1a16\") " pod="metallb-system/speaker-bvtzh" Jun 06 09:25:05 crc kubenswrapper[4911]: I0606 09:25:05.689402 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-bvtzh" Jun 06 09:25:06 crc kubenswrapper[4911]: I0606 09:25:06.373761 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bvtzh" event={"ID":"5e555a27-714d-4b49-8e05-1cab47ab1a16","Type":"ContainerStarted","Data":"7597b7a7c65c79cd7dbe7ff611788591a1c93d5b11c2f065b4e7789305b16121"} Jun 06 09:25:06 crc kubenswrapper[4911]: I0606 09:25:06.373809 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bvtzh" event={"ID":"5e555a27-714d-4b49-8e05-1cab47ab1a16","Type":"ContainerStarted","Data":"f5da56c4bc0341b39a5880937b02f05744bfb3a6a25298a4f3a10648e6620710"} Jun 06 09:25:09 crc kubenswrapper[4911]: I0606 09:25:09.399345 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bvtzh" event={"ID":"5e555a27-714d-4b49-8e05-1cab47ab1a16","Type":"ContainerStarted","Data":"350750bb125ffe46a42eda7101ed18a8c96adc97ff2c2ad07ca222f0fced3313"} Jun 06 09:25:09 crc kubenswrapper[4911]: I0606 09:25:09.399912 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-bvtzh" Jun 06 09:25:09 crc kubenswrapper[4911]: I0606 09:25:09.401949 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5f968f88cc-s8ttm" event={"ID":"6c811786-a64d-49bb-93d8-88f5846c7462","Type":"ContainerStarted","Data":"19dfed9797701dbe4e1d87f37b071809a4b285688caad2f639666b8bf4487aae"} Jun 06 09:25:09 crc kubenswrapper[4911]: I0606 09:25:09.402165 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:09 crc kubenswrapper[4911]: I0606 09:25:09.423831 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-bvtzh" podStartSLOduration=4.214020811 podStartE2EDuration="6.42379562s" podCreationTimestamp="2025-06-06 09:25:03 +0000 UTC" firstStartedPulling="2025-06-06 09:25:06.081437475 +0000 UTC m=+717.356863018" lastFinishedPulling="2025-06-06 09:25:08.291212284 +0000 UTC m=+719.566637827" observedRunningTime="2025-06-06 09:25:09.41991135 +0000 UTC m=+720.695336913" watchObservedRunningTime="2025-06-06 09:25:09.42379562 +0000 UTC m=+720.699221163" Jun 06 09:25:09 crc kubenswrapper[4911]: I0606 09:25:09.436201 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5f968f88cc-s8ttm" podStartSLOduration=2.899259234 podStartE2EDuration="6.43617817s" podCreationTimestamp="2025-06-06 09:25:03 +0000 UTC" firstStartedPulling="2025-06-06 09:25:04.753250691 +0000 UTC m=+716.028676234" lastFinishedPulling="2025-06-06 09:25:08.290169627 +0000 UTC m=+719.565595170" observedRunningTime="2025-06-06 09:25:09.434722393 +0000 UTC m=+720.710147936" watchObservedRunningTime="2025-06-06 09:25:09.43617817 +0000 UTC m=+720.711603713" Jun 06 09:25:11 crc kubenswrapper[4911]: I0606 09:25:11.415990 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" event={"ID":"aded57b8-2cbf-41c5-ada2-e7768d87ab83","Type":"ContainerStarted","Data":"271f7a8b63571f62c6cbe46f20237816a2260107f35cafffd58cc7a354f7fe33"} Jun 06 09:25:11 crc kubenswrapper[4911]: I0606 09:25:11.417407 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" Jun 06 09:25:11 crc kubenswrapper[4911]: I0606 09:25:11.419533 4911 generic.go:334] "Generic (PLEG): container finished" 
podID="1e336f61-fb9b-45f7-ba82-f09f0d38d592" containerID="0146d2ade9b22a4ed324cd875b898688d5f9f536ee4b3d14756fde0afdadbac3" exitCode=0 Jun 06 09:25:11 crc kubenswrapper[4911]: I0606 09:25:11.419580 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mxfmj" event={"ID":"1e336f61-fb9b-45f7-ba82-f09f0d38d592","Type":"ContainerDied","Data":"0146d2ade9b22a4ed324cd875b898688d5f9f536ee4b3d14756fde0afdadbac3"} Jun 06 09:25:11 crc kubenswrapper[4911]: I0606 09:25:11.436464 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" podStartSLOduration=1.92601727 podStartE2EDuration="8.436443893s" podCreationTimestamp="2025-06-06 09:25:03 +0000 UTC" firstStartedPulling="2025-06-06 09:25:04.563707921 +0000 UTC m=+715.839133464" lastFinishedPulling="2025-06-06 09:25:11.074134544 +0000 UTC m=+722.349560087" observedRunningTime="2025-06-06 09:25:11.435500328 +0000 UTC m=+722.710925891" watchObservedRunningTime="2025-06-06 09:25:11.436443893 +0000 UTC m=+722.711869446" Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.429730 4911 generic.go:334] "Generic (PLEG): container finished" podID="1e336f61-fb9b-45f7-ba82-f09f0d38d592" containerID="c0eec5fb7755d9389b53d8982e2620167b57b508c6131d231b5dc7c695ca33bd" exitCode=0 Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.429822 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mxfmj" event={"ID":"1e336f61-fb9b-45f7-ba82-f09f0d38d592","Type":"ContainerDied","Data":"c0eec5fb7755d9389b53d8982e2620167b57b508c6131d231b5dc7c695ca33bd"} Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.614859 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-qjgbt"] Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.615146 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-qjgbt" podUID="1d5dded3-20ed-4ade-98d9-e8c359627137" containerName="container-00" containerID="cri-o://a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf" gracePeriod=2 Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.618670 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-qjgbt"] Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.700427 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-qjgbt" Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.710561 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnbvn\" (UniqueName: \"kubernetes.io/projected/1d5dded3-20ed-4ade-98d9-e8c359627137-kube-api-access-tnbvn\") pod \"1d5dded3-20ed-4ade-98d9-e8c359627137\" (UID: \"1d5dded3-20ed-4ade-98d9-e8c359627137\") " Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.711005 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d5dded3-20ed-4ade-98d9-e8c359627137-host\") pod \"1d5dded3-20ed-4ade-98d9-e8c359627137\" (UID: \"1d5dded3-20ed-4ade-98d9-e8c359627137\") " Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.711297 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d5dded3-20ed-4ade-98d9-e8c359627137-host" (OuterVolumeSpecName: "host") pod "1d5dded3-20ed-4ade-98d9-e8c359627137" (UID: "1d5dded3-20ed-4ade-98d9-e8c359627137"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.711594 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d5dded3-20ed-4ade-98d9-e8c359627137-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.716399 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d5dded3-20ed-4ade-98d9-e8c359627137-kube-api-access-tnbvn" (OuterVolumeSpecName: "kube-api-access-tnbvn") pod "1d5dded3-20ed-4ade-98d9-e8c359627137" (UID: "1d5dded3-20ed-4ade-98d9-e8c359627137"). InnerVolumeSpecName "kube-api-access-tnbvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:25:12 crc kubenswrapper[4911]: I0606 09:25:12.813323 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnbvn\" (UniqueName: \"kubernetes.io/projected/1d5dded3-20ed-4ade-98d9-e8c359627137-kube-api-access-tnbvn\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:13 crc kubenswrapper[4911]: I0606 09:25:13.436757 4911 generic.go:334] "Generic (PLEG): container finished" podID="1d5dded3-20ed-4ade-98d9-e8c359627137" containerID="a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf" exitCode=0 Jun 06 09:25:13 crc kubenswrapper[4911]: I0606 09:25:13.436856 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-qjgbt" Jun 06 09:25:13 crc kubenswrapper[4911]: I0606 09:25:13.436880 4911 scope.go:117] "RemoveContainer" containerID="a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf" Jun 06 09:25:13 crc kubenswrapper[4911]: I0606 09:25:13.439554 4911 generic.go:334] "Generic (PLEG): container finished" podID="1e336f61-fb9b-45f7-ba82-f09f0d38d592" containerID="936364591f681b0c1ff7c8357bd0e1054453ee54d4f14a7f63ef6c0b50ff09e7" exitCode=0 Jun 06 09:25:13 crc kubenswrapper[4911]: I0606 09:25:13.439639 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mxfmj" event={"ID":"1e336f61-fb9b-45f7-ba82-f09f0d38d592","Type":"ContainerDied","Data":"936364591f681b0c1ff7c8357bd0e1054453ee54d4f14a7f63ef6c0b50ff09e7"} Jun 06 09:25:13 crc kubenswrapper[4911]: I0606 09:25:13.455782 4911 scope.go:117] "RemoveContainer" containerID="a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf" Jun 06 09:25:13 crc kubenswrapper[4911]: E0606 09:25:13.456524 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf\": container with ID starting with a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf not found: ID does not exist" containerID="a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf" Jun 06 09:25:13 crc kubenswrapper[4911]: I0606 09:25:13.456563 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf"} err="failed to get container status \"a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf\": rpc error: code = NotFound desc = could not find container \"a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf\": container with ID starting with a22eea9d5e5a24566bf4b5bc6dbc97d9c33691203e3332c9bbe9d1d175cfebcf not found: ID does not exist" Jun 06 09:25:13 crc kubenswrapper[4911]: I0606 09:25:13.955750 4911 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="1d5dded3-20ed-4ade-98d9-e8c359627137" path="/var/lib/kubelet/pods/1d5dded3-20ed-4ade-98d9-e8c359627137/volumes" Jun 06 09:25:14 crc kubenswrapper[4911]: I0606 09:25:14.224531 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5f968f88cc-s8ttm" Jun 06 09:25:14 crc kubenswrapper[4911]: I0606 09:25:14.454498 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mxfmj" event={"ID":"1e336f61-fb9b-45f7-ba82-f09f0d38d592","Type":"ContainerStarted","Data":"3c6310ab974f9ee24c7336419704faa9bdb9b2d6634d9851ba27dd9506db6622"} Jun 06 09:25:14 crc kubenswrapper[4911]: I0606 09:25:14.454905 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mxfmj" event={"ID":"1e336f61-fb9b-45f7-ba82-f09f0d38d592","Type":"ContainerStarted","Data":"dd6340334512b6b27285f8246a6fb9f1bb1e59df722bf7e12f9a62bd476a3b90"} Jun 06 09:25:14 crc kubenswrapper[4911]: I0606 09:25:14.454935 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:14 crc kubenswrapper[4911]: I0606 09:25:14.454947 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mxfmj" event={"ID":"1e336f61-fb9b-45f7-ba82-f09f0d38d592","Type":"ContainerStarted","Data":"9cfe8fdde57440548a8949bfc715a8c90481095700640704454694ee52d05420"} Jun 06 09:25:14 crc kubenswrapper[4911]: I0606 09:25:14.454958 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mxfmj" event={"ID":"1e336f61-fb9b-45f7-ba82-f09f0d38d592","Type":"ContainerStarted","Data":"9a9de01cc7f0ecfe9803233858a6926323b2d3774086127ed912de8073c90ae7"} Jun 06 09:25:14 crc kubenswrapper[4911]: I0606 09:25:14.454969 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mxfmj" event={"ID":"1e336f61-fb9b-45f7-ba82-f09f0d38d592","Type":"ContainerStarted","Data":"afd4e1dd6ec82f468be0936e87cb32b843968ac4f3d68db5a4ac6a361e11f084"} Jun 06 09:25:14 crc kubenswrapper[4911]: I0606 09:25:14.454979 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mxfmj" event={"ID":"1e336f61-fb9b-45f7-ba82-f09f0d38d592","Type":"ContainerStarted","Data":"c057a3118ea22fc8ac374849a95cc4f4327087d4c1af3473e6154142dbf2e633"} Jun 06 09:25:14 crc kubenswrapper[4911]: I0606 09:25:14.479221 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-mxfmj" podStartSLOduration=5.21762725 podStartE2EDuration="11.4792014s" podCreationTimestamp="2025-06-06 09:25:03 +0000 UTC" firstStartedPulling="2025-06-06 09:25:04.83250372 +0000 UTC m=+716.107929263" lastFinishedPulling="2025-06-06 09:25:11.09407787 +0000 UTC m=+722.369503413" observedRunningTime="2025-06-06 09:25:14.476396758 +0000 UTC m=+725.751822321" watchObservedRunningTime="2025-06-06 09:25:14.4792014 +0000 UTC m=+725.754626943" Jun 06 09:25:14 crc kubenswrapper[4911]: I0606 09:25:14.728975 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:14 crc kubenswrapper[4911]: I0606 09:25:14.771834 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.370771 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5255x"] Jun 06 09:25:17 crc kubenswrapper[4911]: E0606 09:25:17.371638 4911 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="1d5dded3-20ed-4ade-98d9-e8c359627137" containerName="container-00" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.371657 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d5dded3-20ed-4ade-98d9-e8c359627137" containerName="container-00" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.371807 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d5dded3-20ed-4ade-98d9-e8c359627137" containerName="container-00" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.372814 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.389468 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5255x"] Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.572235 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-catalog-content\") pod \"certified-operators-5255x\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.572553 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-utilities\") pod \"certified-operators-5255x\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.572687 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6225\" (UniqueName: \"kubernetes.io/projected/47706ecf-7c29-49c1-941f-c54f75ba53b6-kube-api-access-v6225\") pod \"certified-operators-5255x\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.674447 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-catalog-content\") pod \"certified-operators-5255x\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.674812 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-utilities\") pod \"certified-operators-5255x\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.674941 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6225\" (UniqueName: \"kubernetes.io/projected/47706ecf-7c29-49c1-941f-c54f75ba53b6-kube-api-access-v6225\") pod \"certified-operators-5255x\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.675087 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-catalog-content\") pod \"certified-operators-5255x\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.675629 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-utilities\") pod \"certified-operators-5255x\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.696448 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6225\" (UniqueName: \"kubernetes.io/projected/47706ecf-7c29-49c1-941f-c54f75ba53b6-kube-api-access-v6225\") pod \"certified-operators-5255x\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:17 crc kubenswrapper[4911]: I0606 09:25:17.993988 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:18 crc kubenswrapper[4911]: I0606 09:25:18.453859 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5255x"] Jun 06 09:25:18 crc kubenswrapper[4911]: I0606 09:25:18.488048 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5255x" event={"ID":"47706ecf-7c29-49c1-941f-c54f75ba53b6","Type":"ContainerStarted","Data":"5d0b85067603458b7c223ef9b2a022b9c232bb3ccb08f0995c3df46004d34fea"} Jun 06 09:25:19 crc kubenswrapper[4911]: I0606 09:25:19.500710 4911 generic.go:334] "Generic (PLEG): container finished" podID="47706ecf-7c29-49c1-941f-c54f75ba53b6" containerID="e9678965935ae097179cce7449222339e0516fec9019ff5670f9951c2d25defd" exitCode=0 Jun 06 09:25:19 crc kubenswrapper[4911]: I0606 09:25:19.500796 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5255x" event={"ID":"47706ecf-7c29-49c1-941f-c54f75ba53b6","Type":"ContainerDied","Data":"e9678965935ae097179cce7449222339e0516fec9019ff5670f9951c2d25defd"} Jun 06 09:25:20 crc kubenswrapper[4911]: I0606 09:25:20.509707 4911 generic.go:334] "Generic (PLEG): container finished" podID="47706ecf-7c29-49c1-941f-c54f75ba53b6" containerID="4a8d3236205981b0083f74c7ff5180d9caf4f1a3927e4ca4875674d0ced6e51b" exitCode=0 Jun 06 09:25:20 crc kubenswrapper[4911]: I0606 09:25:20.509763 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5255x" event={"ID":"47706ecf-7c29-49c1-941f-c54f75ba53b6","Type":"ContainerDied","Data":"4a8d3236205981b0083f74c7ff5180d9caf4f1a3927e4ca4875674d0ced6e51b"} Jun 06 09:25:21 crc kubenswrapper[4911]: I0606 09:25:21.517721 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5255x" event={"ID":"47706ecf-7c29-49c1-941f-c54f75ba53b6","Type":"ContainerStarted","Data":"a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f"} Jun 06 09:25:21 crc kubenswrapper[4911]: I0606 09:25:21.542800 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5255x" podStartSLOduration=3.157777054 podStartE2EDuration="4.542771137s" podCreationTimestamp="2025-06-06 09:25:17 +0000 UTC" firstStartedPulling="2025-06-06 09:25:19.503816534 +0000 UTC m=+730.779242077" 
lastFinishedPulling="2025-06-06 09:25:20.888810617 +0000 UTC m=+732.164236160" observedRunningTime="2025-06-06 09:25:21.538886567 +0000 UTC m=+732.814312130" watchObservedRunningTime="2025-06-06 09:25:21.542771137 +0000 UTC m=+732.818196680" Jun 06 09:25:24 crc kubenswrapper[4911]: I0606 09:25:24.146714 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-8457d999f9-ghvnk" Jun 06 09:25:24 crc kubenswrapper[4911]: I0606 09:25:24.300726 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:25:24 crc kubenswrapper[4911]: I0606 09:25:24.300815 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:25:24 crc kubenswrapper[4911]: I0606 09:25:24.730940 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-mxfmj" Jun 06 09:25:25 crc kubenswrapper[4911]: I0606 09:25:25.694006 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-bvtzh" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.262320 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b"] Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.263975 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.266232 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.272286 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b"] Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.310769 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsfg4\" (UniqueName: \"kubernetes.io/projected/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-kube-api-access-qsfg4\") pod \"8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.310873 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-util\") pod \"8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.310914 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-bundle\") pod \"8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.412616 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsfg4\" (UniqueName: \"kubernetes.io/projected/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-kube-api-access-qsfg4\") pod \"8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.412711 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-util\") pod \"8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.412737 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-bundle\") pod \"8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.413340 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-util\") pod \"8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.413383 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-bundle\") pod \"8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.431259 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsfg4\" (UniqueName: \"kubernetes.io/projected/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-kube-api-access-qsfg4\") pod \"8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:26 crc kubenswrapper[4911]: I0606 09:25:26.619385 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:27 crc kubenswrapper[4911]: I0606 09:25:27.022937 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b"] Jun 06 09:25:27 crc kubenswrapper[4911]: I0606 09:25:27.551161 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" event={"ID":"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6","Type":"ContainerStarted","Data":"de896a87139ef2bba08a607a483439de7bef09e174284c31c83943c4435a31fc"} Jun 06 09:25:27 crc kubenswrapper[4911]: I0606 09:25:27.994318 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:27 crc kubenswrapper[4911]: I0606 09:25:27.994402 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:28 crc kubenswrapper[4911]: I0606 09:25:28.051477 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:28 crc kubenswrapper[4911]: I0606 09:25:28.559116 4911 generic.go:334] "Generic (PLEG): container finished" podID="fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" containerID="b87a1480abad67c4c777e43018524e0d4e17d1ac71236557169f53d6e626f94e" exitCode=0 Jun 06 09:25:28 crc kubenswrapper[4911]: I0606 09:25:28.559244 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" event={"ID":"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6","Type":"ContainerDied","Data":"b87a1480abad67c4c777e43018524e0d4e17d1ac71236557169f53d6e626f94e"} Jun 06 09:25:28 crc kubenswrapper[4911]: I0606 09:25:28.601843 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:30 crc kubenswrapper[4911]: I0606 09:25:30.411223 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-5255x"] Jun 06 09:25:30 crc kubenswrapper[4911]: I0606 09:25:30.571386 4911 generic.go:334] "Generic (PLEG): container finished" podID="fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" containerID="001ab46f8cff6886d5fe78ed5237d754838e38358cd526dd3e290b1543fc458b" exitCode=0 Jun 06 09:25:30 crc kubenswrapper[4911]: I0606 09:25:30.571435 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" event={"ID":"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6","Type":"ContainerDied","Data":"001ab46f8cff6886d5fe78ed5237d754838e38358cd526dd3e290b1543fc458b"} Jun 06 09:25:30 crc kubenswrapper[4911]: I0606 09:25:30.571745 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5255x" podUID="47706ecf-7c29-49c1-941f-c54f75ba53b6" containerName="registry-server" containerID="cri-o://a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f" gracePeriod=2 Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.584750 4911 generic.go:334] "Generic (PLEG): container finished" podID="fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" containerID="96bf3747387bf7944714379bdfcdb74f80c3ca37a2527c945517996cfa17066a" exitCode=0 Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.584805 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" event={"ID":"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6","Type":"ContainerDied","Data":"96bf3747387bf7944714379bdfcdb74f80c3ca37a2527c945517996cfa17066a"} Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.586500 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.588394 4911 generic.go:334] "Generic (PLEG): container finished" podID="47706ecf-7c29-49c1-941f-c54f75ba53b6" containerID="a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f" exitCode=0 Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.588465 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5255x" event={"ID":"47706ecf-7c29-49c1-941f-c54f75ba53b6","Type":"ContainerDied","Data":"a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f"} Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.588516 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5255x" event={"ID":"47706ecf-7c29-49c1-941f-c54f75ba53b6","Type":"ContainerDied","Data":"5d0b85067603458b7c223ef9b2a022b9c232bb3ccb08f0995c3df46004d34fea"} Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.588542 4911 scope.go:117] "RemoveContainer" containerID="a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.606213 4911 scope.go:117] "RemoveContainer" containerID="4a8d3236205981b0083f74c7ff5180d9caf4f1a3927e4ca4875674d0ced6e51b" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.623605 4911 scope.go:117] "RemoveContainer" containerID="e9678965935ae097179cce7449222339e0516fec9019ff5670f9951c2d25defd" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.642161 4911 scope.go:117] "RemoveContainer" containerID="a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f" Jun 06 09:25:31 crc kubenswrapper[4911]: E0606 09:25:31.642700 4911 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f\": container with ID starting with a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f not found: ID does not exist" containerID="a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.642738 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f"} err="failed to get container status \"a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f\": rpc error: code = NotFound desc = could not find container \"a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f\": container with ID starting with a3258593fb7f6bf65878ea571d61a55095cbbf2ac5f02786655451532a6a8a6f not found: ID does not exist" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.642795 4911 scope.go:117] "RemoveContainer" containerID="4a8d3236205981b0083f74c7ff5180d9caf4f1a3927e4ca4875674d0ced6e51b" Jun 06 09:25:31 crc kubenswrapper[4911]: E0606 09:25:31.643183 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a8d3236205981b0083f74c7ff5180d9caf4f1a3927e4ca4875674d0ced6e51b\": container with ID starting with 4a8d3236205981b0083f74c7ff5180d9caf4f1a3927e4ca4875674d0ced6e51b not found: ID does not exist" containerID="4a8d3236205981b0083f74c7ff5180d9caf4f1a3927e4ca4875674d0ced6e51b" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.643226 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a8d3236205981b0083f74c7ff5180d9caf4f1a3927e4ca4875674d0ced6e51b"} err="failed to get container status \"4a8d3236205981b0083f74c7ff5180d9caf4f1a3927e4ca4875674d0ced6e51b\": rpc error: code = NotFound desc = could not find container \"4a8d3236205981b0083f74c7ff5180d9caf4f1a3927e4ca4875674d0ced6e51b\": container with ID starting with 4a8d3236205981b0083f74c7ff5180d9caf4f1a3927e4ca4875674d0ced6e51b not found: ID does not exist" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.643253 4911 scope.go:117] "RemoveContainer" containerID="e9678965935ae097179cce7449222339e0516fec9019ff5670f9951c2d25defd" Jun 06 09:25:31 crc kubenswrapper[4911]: E0606 09:25:31.643719 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9678965935ae097179cce7449222339e0516fec9019ff5670f9951c2d25defd\": container with ID starting with e9678965935ae097179cce7449222339e0516fec9019ff5670f9951c2d25defd not found: ID does not exist" containerID="e9678965935ae097179cce7449222339e0516fec9019ff5670f9951c2d25defd" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.643765 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9678965935ae097179cce7449222339e0516fec9019ff5670f9951c2d25defd"} err="failed to get container status \"e9678965935ae097179cce7449222339e0516fec9019ff5670f9951c2d25defd\": rpc error: code = NotFound desc = could not find container \"e9678965935ae097179cce7449222339e0516fec9019ff5670f9951c2d25defd\": container with ID starting with e9678965935ae097179cce7449222339e0516fec9019ff5670f9951c2d25defd not found: ID does not exist" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.684048 4911 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-v6225\" (UniqueName: \"kubernetes.io/projected/47706ecf-7c29-49c1-941f-c54f75ba53b6-kube-api-access-v6225\") pod \"47706ecf-7c29-49c1-941f-c54f75ba53b6\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.684128 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-utilities\") pod \"47706ecf-7c29-49c1-941f-c54f75ba53b6\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.684157 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-catalog-content\") pod \"47706ecf-7c29-49c1-941f-c54f75ba53b6\" (UID: \"47706ecf-7c29-49c1-941f-c54f75ba53b6\") " Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.684995 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-utilities" (OuterVolumeSpecName: "utilities") pod "47706ecf-7c29-49c1-941f-c54f75ba53b6" (UID: "47706ecf-7c29-49c1-941f-c54f75ba53b6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.689841 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47706ecf-7c29-49c1-941f-c54f75ba53b6-kube-api-access-v6225" (OuterVolumeSpecName: "kube-api-access-v6225") pod "47706ecf-7c29-49c1-941f-c54f75ba53b6" (UID: "47706ecf-7c29-49c1-941f-c54f75ba53b6"). InnerVolumeSpecName "kube-api-access-v6225". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.713707 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47706ecf-7c29-49c1-941f-c54f75ba53b6" (UID: "47706ecf-7c29-49c1-941f-c54f75ba53b6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.785621 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6225\" (UniqueName: \"kubernetes.io/projected/47706ecf-7c29-49c1-941f-c54f75ba53b6-kube-api-access-v6225\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.785953 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:31 crc kubenswrapper[4911]: I0606 09:25:31.785966 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47706ecf-7c29-49c1-941f-c54f75ba53b6-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:32 crc kubenswrapper[4911]: I0606 09:25:32.594709 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5255x" Jun 06 09:25:32 crc kubenswrapper[4911]: I0606 09:25:32.610859 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5255x"] Jun 06 09:25:32 crc kubenswrapper[4911]: I0606 09:25:32.614441 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5255x"] Jun 06 09:25:32 crc kubenswrapper[4911]: I0606 09:25:32.859824 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.031895 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-bundle\") pod \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.031956 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-util\") pod \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.032013 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsfg4\" (UniqueName: \"kubernetes.io/projected/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-kube-api-access-qsfg4\") pod \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\" (UID: \"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6\") " Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.033280 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-bundle" (OuterVolumeSpecName: "bundle") pod "fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" (UID: "fd9d9e39-0195-4749-a5f4-c8c802a9f3f6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.036041 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-kube-api-access-qsfg4" (OuterVolumeSpecName: "kube-api-access-qsfg4") pod "fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" (UID: "fd9d9e39-0195-4749-a5f4-c8c802a9f3f6"). InnerVolumeSpecName "kube-api-access-qsfg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.133429 4911 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.133463 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsfg4\" (UniqueName: \"kubernetes.io/projected/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-kube-api-access-qsfg4\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.341027 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-util" (OuterVolumeSpecName: "util") pod "fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" (UID: "fd9d9e39-0195-4749-a5f4-c8c802a9f3f6"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.437168 4911 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fd9d9e39-0195-4749-a5f4-c8c802a9f3f6-util\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.603693 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" event={"ID":"fd9d9e39-0195-4749-a5f4-c8c802a9f3f6","Type":"ContainerDied","Data":"de896a87139ef2bba08a607a483439de7bef09e174284c31c83943c4435a31fc"} Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.603742 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de896a87139ef2bba08a607a483439de7bef09e174284c31c83943c4435a31fc" Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.603806 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b" Jun 06 09:25:33 crc kubenswrapper[4911]: I0606 09:25:33.958811 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47706ecf-7c29-49c1-941f-c54f75ba53b6" path="/var/lib/kubelet/pods/47706ecf-7c29-49c1-941f-c54f75ba53b6/volumes" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.116363 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-6c5f9d4654-mw7jx"] Jun 06 09:25:37 crc kubenswrapper[4911]: E0606 09:25:37.116957 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" containerName="extract" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.116974 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" containerName="extract" Jun 06 09:25:37 crc kubenswrapper[4911]: E0606 09:25:37.116990 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47706ecf-7c29-49c1-941f-c54f75ba53b6" containerName="extract-content" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.116998 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="47706ecf-7c29-49c1-941f-c54f75ba53b6" containerName="extract-content" Jun 06 09:25:37 crc kubenswrapper[4911]: E0606 09:25:37.117011 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" containerName="util" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.117021 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" containerName="util" Jun 06 09:25:37 crc kubenswrapper[4911]: E0606 09:25:37.117034 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47706ecf-7c29-49c1-941f-c54f75ba53b6" containerName="extract-utilities" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.117041 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="47706ecf-7c29-49c1-941f-c54f75ba53b6" containerName="extract-utilities" Jun 06 09:25:37 crc kubenswrapper[4911]: E0606 09:25:37.117054 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47706ecf-7c29-49c1-941f-c54f75ba53b6" containerName="registry-server" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.117061 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="47706ecf-7c29-49c1-941f-c54f75ba53b6" containerName="registry-server" Jun 06 09:25:37 crc kubenswrapper[4911]: E0606 
09:25:37.117074 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" containerName="pull" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.117081 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" containerName="pull" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.117215 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="47706ecf-7c29-49c1-941f-c54f75ba53b6" containerName="registry-server" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.117237 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd9d9e39-0195-4749-a5f4-c8c802a9f3f6" containerName="extract" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.117691 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-6c5f9d4654-mw7jx" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.119506 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-5j2zk" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.119691 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.119976 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.129139 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6c5f9d4654-mw7jx"] Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.245665 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2lgr\" (UniqueName: \"kubernetes.io/projected/abdedb83-822a-444e-a8ad-e843d4194830-kube-api-access-q2lgr\") pod \"nmstate-operator-6c5f9d4654-mw7jx\" (UID: \"abdedb83-822a-444e-a8ad-e843d4194830\") " pod="openshift-nmstate/nmstate-operator-6c5f9d4654-mw7jx" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.346614 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2lgr\" (UniqueName: \"kubernetes.io/projected/abdedb83-822a-444e-a8ad-e843d4194830-kube-api-access-q2lgr\") pod \"nmstate-operator-6c5f9d4654-mw7jx\" (UID: \"abdedb83-822a-444e-a8ad-e843d4194830\") " pod="openshift-nmstate/nmstate-operator-6c5f9d4654-mw7jx" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.368699 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2lgr\" (UniqueName: \"kubernetes.io/projected/abdedb83-822a-444e-a8ad-e843d4194830-kube-api-access-q2lgr\") pod \"nmstate-operator-6c5f9d4654-mw7jx\" (UID: \"abdedb83-822a-444e-a8ad-e843d4194830\") " pod="openshift-nmstate/nmstate-operator-6c5f9d4654-mw7jx" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.436851 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-6c5f9d4654-mw7jx" Jun 06 09:25:37 crc kubenswrapper[4911]: I0606 09:25:37.896149 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6c5f9d4654-mw7jx"] Jun 06 09:25:38 crc kubenswrapper[4911]: I0606 09:25:38.632945 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6c5f9d4654-mw7jx" event={"ID":"abdedb83-822a-444e-a8ad-e843d4194830","Type":"ContainerStarted","Data":"70c2bc64137279f0d7bf7ab0f104cfe45f5ef993947f341e9a51dac303315885"} Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.615326 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-khszf"] Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.616481 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.627000 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-khszf"] Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.779830 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-utilities\") pod \"community-operators-khszf\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.780526 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjh6v\" (UniqueName: \"kubernetes.io/projected/0da2e712-e087-4f30-914b-3ee685e54ee6-kube-api-access-bjh6v\") pod \"community-operators-khszf\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.780565 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-catalog-content\") pod \"community-operators-khszf\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.881759 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjh6v\" (UniqueName: \"kubernetes.io/projected/0da2e712-e087-4f30-914b-3ee685e54ee6-kube-api-access-bjh6v\") pod \"community-operators-khszf\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.881812 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-catalog-content\") pod \"community-operators-khszf\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.881841 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-utilities\") pod \"community-operators-khszf\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " 
pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.882410 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-utilities\") pod \"community-operators-khszf\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.882532 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-catalog-content\") pod \"community-operators-khszf\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.900706 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjh6v\" (UniqueName: \"kubernetes.io/projected/0da2e712-e087-4f30-914b-3ee685e54ee6-kube-api-access-bjh6v\") pod \"community-operators-khszf\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:39 crc kubenswrapper[4911]: I0606 09:25:39.934400 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:40 crc kubenswrapper[4911]: I0606 09:25:40.283956 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-khszf"] Jun 06 09:25:40 crc kubenswrapper[4911]: W0606 09:25:40.308785 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0da2e712_e087_4f30_914b_3ee685e54ee6.slice/crio-c844ccdeb2b6b1ab631f043e29a1f2dab9ca432dff341f54599ebc88388612b8 WatchSource:0}: Error finding container c844ccdeb2b6b1ab631f043e29a1f2dab9ca432dff341f54599ebc88388612b8: Status 404 returned error can't find the container with id c844ccdeb2b6b1ab631f043e29a1f2dab9ca432dff341f54599ebc88388612b8 Jun 06 09:25:40 crc kubenswrapper[4911]: I0606 09:25:40.652042 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6c5f9d4654-mw7jx" event={"ID":"abdedb83-822a-444e-a8ad-e843d4194830","Type":"ContainerStarted","Data":"96a220cc08f2e70bb0eb43d106660b2a8b19ad0f0cb2eb21bbf8f62a736b67c5"} Jun 06 09:25:40 crc kubenswrapper[4911]: I0606 09:25:40.654681 4911 generic.go:334] "Generic (PLEG): container finished" podID="0da2e712-e087-4f30-914b-3ee685e54ee6" containerID="b23494143484668d3cef863d7b72bd46f006fb0086e12259b8fb2d89659efad1" exitCode=0 Jun 06 09:25:40 crc kubenswrapper[4911]: I0606 09:25:40.654751 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khszf" event={"ID":"0da2e712-e087-4f30-914b-3ee685e54ee6","Type":"ContainerDied","Data":"b23494143484668d3cef863d7b72bd46f006fb0086e12259b8fb2d89659efad1"} Jun 06 09:25:40 crc kubenswrapper[4911]: I0606 09:25:40.654791 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khszf" event={"ID":"0da2e712-e087-4f30-914b-3ee685e54ee6","Type":"ContainerStarted","Data":"c844ccdeb2b6b1ab631f043e29a1f2dab9ca432dff341f54599ebc88388612b8"} Jun 06 09:25:40 crc kubenswrapper[4911]: I0606 09:25:40.673396 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-6c5f9d4654-mw7jx" 
podStartSLOduration=1.399073088 podStartE2EDuration="3.673377267s" podCreationTimestamp="2025-06-06 09:25:37 +0000 UTC" firstStartedPulling="2025-06-06 09:25:37.905838725 +0000 UTC m=+749.181264268" lastFinishedPulling="2025-06-06 09:25:40.180142904 +0000 UTC m=+751.455568447" observedRunningTime="2025-06-06 09:25:40.667227457 +0000 UTC m=+751.942653000" watchObservedRunningTime="2025-06-06 09:25:40.673377267 +0000 UTC m=+751.948802810" Jun 06 09:25:41 crc kubenswrapper[4911]: I0606 09:25:41.664003 4911 generic.go:334] "Generic (PLEG): container finished" podID="0da2e712-e087-4f30-914b-3ee685e54ee6" containerID="7e5790b43a806e62f514d4f4d9c77f1bd0bff390f25e92ceb5a939b0d2bfe2e7" exitCode=0 Jun 06 09:25:41 crc kubenswrapper[4911]: I0606 09:25:41.664063 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khszf" event={"ID":"0da2e712-e087-4f30-914b-3ee685e54ee6","Type":"ContainerDied","Data":"7e5790b43a806e62f514d4f4d9c77f1bd0bff390f25e92ceb5a939b0d2bfe2e7"} Jun 06 09:25:42 crc kubenswrapper[4911]: I0606 09:25:42.670872 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khszf" event={"ID":"0da2e712-e087-4f30-914b-3ee685e54ee6","Type":"ContainerStarted","Data":"ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3"} Jun 06 09:25:42 crc kubenswrapper[4911]: I0606 09:25:42.691683 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-khszf" podStartSLOduration=2.271689888 podStartE2EDuration="3.691666775s" podCreationTimestamp="2025-06-06 09:25:39 +0000 UTC" firstStartedPulling="2025-06-06 09:25:40.657499686 +0000 UTC m=+751.932925219" lastFinishedPulling="2025-06-06 09:25:42.077476563 +0000 UTC m=+753.352902106" observedRunningTime="2025-06-06 09:25:42.687292962 +0000 UTC m=+753.962718515" watchObservedRunningTime="2025-06-06 09:25:42.691666775 +0000 UTC m=+753.967092318" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.026766 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-748555f888-rl4mq"] Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.029014 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-748555f888-rl4mq" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.031500 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-rtgpq" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.035859 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr"] Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.037013 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.042631 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-748555f888-rl4mq"] Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.046261 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.066926 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-47p8t"] Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.067788 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.076609 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr"] Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.194235 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/cbc7d963-711d-4639-a21e-f5876e0aec15-nmstate-lock\") pod \"nmstate-handler-47p8t\" (UID: \"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.194313 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v88zs\" (UniqueName: \"kubernetes.io/projected/27855136-8ad1-414e-896b-97ff98a45f34-kube-api-access-v88zs\") pod \"nmstate-webhook-79c49d6bf4-kc7jr\" (UID: \"27855136-8ad1-414e-896b-97ff98a45f34\") " pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.194352 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq7vg\" (UniqueName: \"kubernetes.io/projected/cbc7d963-711d-4639-a21e-f5876e0aec15-kube-api-access-bq7vg\") pod \"nmstate-handler-47p8t\" (UID: \"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.194385 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/cbc7d963-711d-4639-a21e-f5876e0aec15-ovs-socket\") pod \"nmstate-handler-47p8t\" (UID: \"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.194505 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl9ds\" (UniqueName: \"kubernetes.io/projected/7f26fb38-be4a-43b1-b2ba-632ba751588c-kube-api-access-gl9ds\") pod \"nmstate-metrics-748555f888-rl4mq\" (UID: \"7f26fb38-be4a-43b1-b2ba-632ba751588c\") " pod="openshift-nmstate/nmstate-metrics-748555f888-rl4mq" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.194536 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/27855136-8ad1-414e-896b-97ff98a45f34-tls-key-pair\") pod \"nmstate-webhook-79c49d6bf4-kc7jr\" (UID: \"27855136-8ad1-414e-896b-97ff98a45f34\") " pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.194596 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/cbc7d963-711d-4639-a21e-f5876e0aec15-dbus-socket\") pod \"nmstate-handler-47p8t\" (UID: \"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.196061 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k"] Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.196914 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.199403 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.199638 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.199881 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-7pqgq" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.204891 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k"] Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.295825 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/cbc7d963-711d-4639-a21e-f5876e0aec15-nmstate-lock\") pod \"nmstate-handler-47p8t\" (UID: \"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.295900 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsjwf\" (UniqueName: \"kubernetes.io/projected/d50d1712-3d8c-4212-9185-a4a2a186215c-kube-api-access-nsjwf\") pod \"nmstate-console-plugin-7d9b68456c-zdv9k\" (UID: \"d50d1712-3d8c-4212-9185-a4a2a186215c\") " pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.295931 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/d50d1712-3d8c-4212-9185-a4a2a186215c-plugin-serving-cert\") pod \"nmstate-console-plugin-7d9b68456c-zdv9k\" (UID: \"d50d1712-3d8c-4212-9185-a4a2a186215c\") " pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.295965 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v88zs\" (UniqueName: \"kubernetes.io/projected/27855136-8ad1-414e-896b-97ff98a45f34-kube-api-access-v88zs\") pod \"nmstate-webhook-79c49d6bf4-kc7jr\" (UID: \"27855136-8ad1-414e-896b-97ff98a45f34\") " pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.296002 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq7vg\" (UniqueName: \"kubernetes.io/projected/cbc7d963-711d-4639-a21e-f5876e0aec15-kube-api-access-bq7vg\") pod \"nmstate-handler-47p8t\" (UID: \"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.296030 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/cbc7d963-711d-4639-a21e-f5876e0aec15-nmstate-lock\") pod \"nmstate-handler-47p8t\" (UID: \"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.296218 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/cbc7d963-711d-4639-a21e-f5876e0aec15-ovs-socket\") pod \"nmstate-handler-47p8t\" (UID: 
\"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.296377 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/cbc7d963-711d-4639-a21e-f5876e0aec15-ovs-socket\") pod \"nmstate-handler-47p8t\" (UID: \"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.296384 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/d50d1712-3d8c-4212-9185-a4a2a186215c-nginx-conf\") pod \"nmstate-console-plugin-7d9b68456c-zdv9k\" (UID: \"d50d1712-3d8c-4212-9185-a4a2a186215c\") " pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.296505 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl9ds\" (UniqueName: \"kubernetes.io/projected/7f26fb38-be4a-43b1-b2ba-632ba751588c-kube-api-access-gl9ds\") pod \"nmstate-metrics-748555f888-rl4mq\" (UID: \"7f26fb38-be4a-43b1-b2ba-632ba751588c\") " pod="openshift-nmstate/nmstate-metrics-748555f888-rl4mq" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.296540 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/27855136-8ad1-414e-896b-97ff98a45f34-tls-key-pair\") pod \"nmstate-webhook-79c49d6bf4-kc7jr\" (UID: \"27855136-8ad1-414e-896b-97ff98a45f34\") " pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.296609 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/cbc7d963-711d-4639-a21e-f5876e0aec15-dbus-socket\") pod \"nmstate-handler-47p8t\" (UID: \"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.297027 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/cbc7d963-711d-4639-a21e-f5876e0aec15-dbus-socket\") pod \"nmstate-handler-47p8t\" (UID: \"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.314043 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl9ds\" (UniqueName: \"kubernetes.io/projected/7f26fb38-be4a-43b1-b2ba-632ba751588c-kube-api-access-gl9ds\") pod \"nmstate-metrics-748555f888-rl4mq\" (UID: \"7f26fb38-be4a-43b1-b2ba-632ba751588c\") " pod="openshift-nmstate/nmstate-metrics-748555f888-rl4mq" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.315804 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq7vg\" (UniqueName: \"kubernetes.io/projected/cbc7d963-711d-4639-a21e-f5876e0aec15-kube-api-access-bq7vg\") pod \"nmstate-handler-47p8t\" (UID: \"cbc7d963-711d-4639-a21e-f5876e0aec15\") " pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.319949 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/27855136-8ad1-414e-896b-97ff98a45f34-tls-key-pair\") pod \"nmstate-webhook-79c49d6bf4-kc7jr\" (UID: 
\"27855136-8ad1-414e-896b-97ff98a45f34\") " pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.330016 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v88zs\" (UniqueName: \"kubernetes.io/projected/27855136-8ad1-414e-896b-97ff98a45f34-kube-api-access-v88zs\") pod \"nmstate-webhook-79c49d6bf4-kc7jr\" (UID: \"27855136-8ad1-414e-896b-97ff98a45f34\") " pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.356729 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-748555f888-rl4mq" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.368806 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.384649 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.397761 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/d50d1712-3d8c-4212-9185-a4a2a186215c-nginx-conf\") pod \"nmstate-console-plugin-7d9b68456c-zdv9k\" (UID: \"d50d1712-3d8c-4212-9185-a4a2a186215c\") " pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.397864 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsjwf\" (UniqueName: \"kubernetes.io/projected/d50d1712-3d8c-4212-9185-a4a2a186215c-kube-api-access-nsjwf\") pod \"nmstate-console-plugin-7d9b68456c-zdv9k\" (UID: \"d50d1712-3d8c-4212-9185-a4a2a186215c\") " pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.397894 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/d50d1712-3d8c-4212-9185-a4a2a186215c-plugin-serving-cert\") pod \"nmstate-console-plugin-7d9b68456c-zdv9k\" (UID: \"d50d1712-3d8c-4212-9185-a4a2a186215c\") " pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:48 crc kubenswrapper[4911]: E0606 09:25:48.398048 4911 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Jun 06 09:25:48 crc kubenswrapper[4911]: E0606 09:25:48.398177 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d50d1712-3d8c-4212-9185-a4a2a186215c-plugin-serving-cert podName:d50d1712-3d8c-4212-9185-a4a2a186215c nodeName:}" failed. No retries permitted until 2025-06-06 09:25:48.89815153 +0000 UTC m=+760.173577093 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/d50d1712-3d8c-4212-9185-a4a2a186215c-plugin-serving-cert") pod "nmstate-console-plugin-7d9b68456c-zdv9k" (UID: "d50d1712-3d8c-4212-9185-a4a2a186215c") : secret "plugin-serving-cert" not found Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.399286 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/d50d1712-3d8c-4212-9185-a4a2a186215c-nginx-conf\") pod \"nmstate-console-plugin-7d9b68456c-zdv9k\" (UID: \"d50d1712-3d8c-4212-9185-a4a2a186215c\") " pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.411766 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7f68688b7f-lmlkr"] Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.423233 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.431181 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7f68688b7f-lmlkr"] Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.433181 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsjwf\" (UniqueName: \"kubernetes.io/projected/d50d1712-3d8c-4212-9185-a4a2a186215c-kube-api-access-nsjwf\") pod \"nmstate-console-plugin-7d9b68456c-zdv9k\" (UID: \"d50d1712-3d8c-4212-9185-a4a2a186215c\") " pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.600673 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwwc7\" (UniqueName: \"kubernetes.io/projected/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-kube-api-access-bwwc7\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.600734 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-oauth-serving-cert\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.600765 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-service-ca\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.600783 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-console-serving-cert\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.600809 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-console-oauth-config\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.600836 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-trusted-ca-bundle\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.600992 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-console-config\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.689056 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-748555f888-rl4mq"] Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.703558 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwwc7\" (UniqueName: \"kubernetes.io/projected/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-kube-api-access-bwwc7\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.703748 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-oauth-serving-cert\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.703908 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-service-ca\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.704251 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-console-serving-cert\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.704527 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-console-oauth-config\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.704604 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-trusted-ca-bundle\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " 
pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.704661 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-console-config\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.704907 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-oauth-serving-cert\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.704946 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-service-ca\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.704183 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-748555f888-rl4mq" event={"ID":"7f26fb38-be4a-43b1-b2ba-632ba751588c","Type":"ContainerStarted","Data":"069615ecbe6d3335675d9f289b3c5b6564fed08e08d7e4cd7e8cf350ac92ac33"} Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.706258 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-47p8t" event={"ID":"cbc7d963-711d-4639-a21e-f5876e0aec15","Type":"ContainerStarted","Data":"78355281d897a5156f849ca0599365683adf4edfa627f3dd8e87e154f8393ef9"} Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.706558 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-console-config\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.706822 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-trusted-ca-bundle\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.709344 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-console-serving-cert\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.709440 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-console-oauth-config\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.720439 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwwc7\" 
(UniqueName: \"kubernetes.io/projected/0c6b02ce-0b72-44e0-87e2-c5784fc11a53-kube-api-access-bwwc7\") pod \"console-7f68688b7f-lmlkr\" (UID: \"0c6b02ce-0b72-44e0-87e2-c5784fc11a53\") " pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.784054 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:48 crc kubenswrapper[4911]: W0606 09:25:48.836400 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27855136_8ad1_414e_896b_97ff98a45f34.slice/crio-582fdbfe7cac0c534a2a5bb0899d88f33c767f8749647c86e2be8551d91153c6 WatchSource:0}: Error finding container 582fdbfe7cac0c534a2a5bb0899d88f33c767f8749647c86e2be8551d91153c6: Status 404 returned error can't find the container with id 582fdbfe7cac0c534a2a5bb0899d88f33c767f8749647c86e2be8551d91153c6 Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.836874 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr"] Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.907596 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/d50d1712-3d8c-4212-9185-a4a2a186215c-plugin-serving-cert\") pod \"nmstate-console-plugin-7d9b68456c-zdv9k\" (UID: \"d50d1712-3d8c-4212-9185-a4a2a186215c\") " pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:48 crc kubenswrapper[4911]: I0606 09:25:48.912428 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/d50d1712-3d8c-4212-9185-a4a2a186215c-plugin-serving-cert\") pod \"nmstate-console-plugin-7d9b68456c-zdv9k\" (UID: \"d50d1712-3d8c-4212-9185-a4a2a186215c\") " pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:49 crc kubenswrapper[4911]: I0606 09:25:49.110751 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" Jun 06 09:25:49 crc kubenswrapper[4911]: I0606 09:25:49.210726 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7f68688b7f-lmlkr"] Jun 06 09:25:49 crc kubenswrapper[4911]: W0606 09:25:49.226167 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c6b02ce_0b72_44e0_87e2_c5784fc11a53.slice/crio-ba91fd979f0d4acfe1b0c92785a39afc2587983c075be7e8740c5ecb9d65d17d WatchSource:0}: Error finding container ba91fd979f0d4acfe1b0c92785a39afc2587983c075be7e8740c5ecb9d65d17d: Status 404 returned error can't find the container with id ba91fd979f0d4acfe1b0c92785a39afc2587983c075be7e8740c5ecb9d65d17d Jun 06 09:25:49 crc kubenswrapper[4911]: I0606 09:25:49.520778 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k"] Jun 06 09:25:49 crc kubenswrapper[4911]: I0606 09:25:49.720258 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" event={"ID":"d50d1712-3d8c-4212-9185-a4a2a186215c","Type":"ContainerStarted","Data":"fa66cb5b43194785c37069ae54918a847bc2e13f7434f3497f4bac83fcdc9d6d"} Jun 06 09:25:49 crc kubenswrapper[4911]: I0606 09:25:49.723388 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" event={"ID":"27855136-8ad1-414e-896b-97ff98a45f34","Type":"ContainerStarted","Data":"582fdbfe7cac0c534a2a5bb0899d88f33c767f8749647c86e2be8551d91153c6"} Jun 06 09:25:49 crc kubenswrapper[4911]: I0606 09:25:49.725824 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7f68688b7f-lmlkr" event={"ID":"0c6b02ce-0b72-44e0-87e2-c5784fc11a53","Type":"ContainerStarted","Data":"ea10396209cec0681649187da981bd1585a55233f8fc59f4f761febc510b6755"} Jun 06 09:25:49 crc kubenswrapper[4911]: I0606 09:25:49.725867 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7f68688b7f-lmlkr" event={"ID":"0c6b02ce-0b72-44e0-87e2-c5784fc11a53","Type":"ContainerStarted","Data":"ba91fd979f0d4acfe1b0c92785a39afc2587983c075be7e8740c5ecb9d65d17d"} Jun 06 09:25:49 crc kubenswrapper[4911]: I0606 09:25:49.749155 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7f68688b7f-lmlkr" podStartSLOduration=1.749130404 podStartE2EDuration="1.749130404s" podCreationTimestamp="2025-06-06 09:25:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:25:49.745363596 +0000 UTC m=+761.020789149" watchObservedRunningTime="2025-06-06 09:25:49.749130404 +0000 UTC m=+761.024555947" Jun 06 09:25:49 crc kubenswrapper[4911]: I0606 09:25:49.935188 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:49 crc kubenswrapper[4911]: I0606 09:25:49.935243 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:49 crc kubenswrapper[4911]: I0606 09:25:49.993487 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:50 crc kubenswrapper[4911]: I0606 09:25:50.778464 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:50 crc kubenswrapper[4911]: I0606 09:25:50.828598 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-khszf"] Jun 06 09:25:51 crc kubenswrapper[4911]: I0606 09:25:51.749221 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" event={"ID":"27855136-8ad1-414e-896b-97ff98a45f34","Type":"ContainerStarted","Data":"98c46ddb1c2bb87e1afd2062055e889af75d6d5cf239aafec553bf98a2de2a74"} Jun 06 09:25:51 crc kubenswrapper[4911]: I0606 09:25:51.749572 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" Jun 06 09:25:51 crc kubenswrapper[4911]: I0606 09:25:51.754754 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-748555f888-rl4mq" event={"ID":"7f26fb38-be4a-43b1-b2ba-632ba751588c","Type":"ContainerStarted","Data":"93148429cd1445b7de1da6ea7d3a11fe88332ae2b4cdcff0cc86e83d4a127ed1"} Jun 06 09:25:51 crc kubenswrapper[4911]: I0606 09:25:51.754793 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-748555f888-rl4mq" event={"ID":"7f26fb38-be4a-43b1-b2ba-632ba751588c","Type":"ContainerStarted","Data":"46ed0bc0fa658b1fac0718304cbbdb16c6cc2e0a63723e93467830376050389d"} Jun 06 09:25:51 crc kubenswrapper[4911]: I0606 09:25:51.757750 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-47p8t" event={"ID":"cbc7d963-711d-4639-a21e-f5876e0aec15","Type":"ContainerStarted","Data":"2e18aa27156ffb67479f40fa009cb419d5d6eed6b8818d1b6bd19f48dbc1d73f"} Jun 06 09:25:51 crc kubenswrapper[4911]: I0606 09:25:51.772953 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" podStartSLOduration=1.969750437 podStartE2EDuration="3.772923593s" podCreationTimestamp="2025-06-06 09:25:48 +0000 UTC" firstStartedPulling="2025-06-06 09:25:48.840476048 +0000 UTC m=+760.115901591" lastFinishedPulling="2025-06-06 09:25:50.643649204 +0000 UTC m=+761.919074747" observedRunningTime="2025-06-06 09:25:51.76736936 +0000 UTC m=+763.042794903" watchObservedRunningTime="2025-06-06 09:25:51.772923593 +0000 UTC m=+763.048349136" Jun 06 09:25:51 crc kubenswrapper[4911]: I0606 09:25:51.786952 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-748555f888-rl4mq" podStartSLOduration=1.843995337 podStartE2EDuration="3.786933606s" podCreationTimestamp="2025-06-06 09:25:48 +0000 UTC" firstStartedPulling="2025-06-06 09:25:48.699691138 +0000 UTC m=+759.975116681" lastFinishedPulling="2025-06-06 09:25:50.642629417 +0000 UTC m=+761.918054950" observedRunningTime="2025-06-06 09:25:51.785036897 +0000 UTC m=+763.060462450" watchObservedRunningTime="2025-06-06 09:25:51.786933606 +0000 UTC m=+763.062359149" Jun 06 09:25:51 crc kubenswrapper[4911]: I0606 09:25:51.803111 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-47p8t" podStartSLOduration=1.612455979 podStartE2EDuration="3.803067263s" podCreationTimestamp="2025-06-06 09:25:48 +0000 UTC" firstStartedPulling="2025-06-06 09:25:48.452491536 +0000 UTC m=+759.727917079" lastFinishedPulling="2025-06-06 09:25:50.64310282 +0000 UTC m=+761.918528363" observedRunningTime="2025-06-06 09:25:51.799046929 +0000 UTC m=+763.074472482" 
watchObservedRunningTime="2025-06-06 09:25:51.803067263 +0000 UTC m=+763.078492806" Jun 06 09:25:52 crc kubenswrapper[4911]: I0606 09:25:52.766628 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" event={"ID":"d50d1712-3d8c-4212-9185-a4a2a186215c","Type":"ContainerStarted","Data":"6485be72652d09cf6081f0e5f151a94d40119a048c27e7ccee6949354c15fc06"} Jun 06 09:25:52 crc kubenswrapper[4911]: I0606 09:25:52.766837 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-khszf" podUID="0da2e712-e087-4f30-914b-3ee685e54ee6" containerName="registry-server" containerID="cri-o://ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3" gracePeriod=2 Jun 06 09:25:52 crc kubenswrapper[4911]: I0606 09:25:52.770996 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:52 crc kubenswrapper[4911]: I0606 09:25:52.785938 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7d9b68456c-zdv9k" podStartSLOduration=2.411973613 podStartE2EDuration="4.785916397s" podCreationTimestamp="2025-06-06 09:25:48 +0000 UTC" firstStartedPulling="2025-06-06 09:25:49.538817635 +0000 UTC m=+760.814243178" lastFinishedPulling="2025-06-06 09:25:51.912760409 +0000 UTC m=+763.188185962" observedRunningTime="2025-06-06 09:25:52.785678431 +0000 UTC m=+764.061103974" watchObservedRunningTime="2025-06-06 09:25:52.785916397 +0000 UTC m=+764.061341940" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.209731 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.371153 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjh6v\" (UniqueName: \"kubernetes.io/projected/0da2e712-e087-4f30-914b-3ee685e54ee6-kube-api-access-bjh6v\") pod \"0da2e712-e087-4f30-914b-3ee685e54ee6\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.371490 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-catalog-content\") pod \"0da2e712-e087-4f30-914b-3ee685e54ee6\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.371583 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-utilities\") pod \"0da2e712-e087-4f30-914b-3ee685e54ee6\" (UID: \"0da2e712-e087-4f30-914b-3ee685e54ee6\") " Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.372470 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-utilities" (OuterVolumeSpecName: "utilities") pod "0da2e712-e087-4f30-914b-3ee685e54ee6" (UID: "0da2e712-e087-4f30-914b-3ee685e54ee6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.377902 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0da2e712-e087-4f30-914b-3ee685e54ee6-kube-api-access-bjh6v" (OuterVolumeSpecName: "kube-api-access-bjh6v") pod "0da2e712-e087-4f30-914b-3ee685e54ee6" (UID: "0da2e712-e087-4f30-914b-3ee685e54ee6"). InnerVolumeSpecName "kube-api-access-bjh6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.410300 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0da2e712-e087-4f30-914b-3ee685e54ee6" (UID: "0da2e712-e087-4f30-914b-3ee685e54ee6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.472759 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjh6v\" (UniqueName: \"kubernetes.io/projected/0da2e712-e087-4f30-914b-3ee685e54ee6-kube-api-access-bjh6v\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.472815 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.472828 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0da2e712-e087-4f30-914b-3ee685e54ee6-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.775332 4911 generic.go:334] "Generic (PLEG): container finished" podID="0da2e712-e087-4f30-914b-3ee685e54ee6" containerID="ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3" exitCode=0 Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.775391 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-khszf" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.775425 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khszf" event={"ID":"0da2e712-e087-4f30-914b-3ee685e54ee6","Type":"ContainerDied","Data":"ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3"} Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.775473 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-khszf" event={"ID":"0da2e712-e087-4f30-914b-3ee685e54ee6","Type":"ContainerDied","Data":"c844ccdeb2b6b1ab631f043e29a1f2dab9ca432dff341f54599ebc88388612b8"} Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.775490 4911 scope.go:117] "RemoveContainer" containerID="ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.792086 4911 scope.go:117] "RemoveContainer" containerID="7e5790b43a806e62f514d4f4d9c77f1bd0bff390f25e92ceb5a939b0d2bfe2e7" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.800642 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-khszf"] Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.804758 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-khszf"] Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.830019 4911 scope.go:117] "RemoveContainer" containerID="b23494143484668d3cef863d7b72bd46f006fb0086e12259b8fb2d89659efad1" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.846016 4911 scope.go:117] "RemoveContainer" containerID="ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3" Jun 06 09:25:53 crc kubenswrapper[4911]: E0606 09:25:53.846596 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3\": container with ID starting with ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3 not found: ID does not exist" containerID="ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.846652 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3"} err="failed to get container status \"ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3\": rpc error: code = NotFound desc = could not find container \"ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3\": container with ID starting with ea3ec83a200807e15f7a65cf0f88546b9e526b03349a8f2bb432074c7100f8b3 not found: ID does not exist" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.846687 4911 scope.go:117] "RemoveContainer" containerID="7e5790b43a806e62f514d4f4d9c77f1bd0bff390f25e92ceb5a939b0d2bfe2e7" Jun 06 09:25:53 crc kubenswrapper[4911]: E0606 09:25:53.847066 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e5790b43a806e62f514d4f4d9c77f1bd0bff390f25e92ceb5a939b0d2bfe2e7\": container with ID starting with 7e5790b43a806e62f514d4f4d9c77f1bd0bff390f25e92ceb5a939b0d2bfe2e7 not found: ID does not exist" containerID="7e5790b43a806e62f514d4f4d9c77f1bd0bff390f25e92ceb5a939b0d2bfe2e7" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.847132 4911 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e5790b43a806e62f514d4f4d9c77f1bd0bff390f25e92ceb5a939b0d2bfe2e7"} err="failed to get container status \"7e5790b43a806e62f514d4f4d9c77f1bd0bff390f25e92ceb5a939b0d2bfe2e7\": rpc error: code = NotFound desc = could not find container \"7e5790b43a806e62f514d4f4d9c77f1bd0bff390f25e92ceb5a939b0d2bfe2e7\": container with ID starting with 7e5790b43a806e62f514d4f4d9c77f1bd0bff390f25e92ceb5a939b0d2bfe2e7 not found: ID does not exist" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.847167 4911 scope.go:117] "RemoveContainer" containerID="b23494143484668d3cef863d7b72bd46f006fb0086e12259b8fb2d89659efad1" Jun 06 09:25:53 crc kubenswrapper[4911]: E0606 09:25:53.848178 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b23494143484668d3cef863d7b72bd46f006fb0086e12259b8fb2d89659efad1\": container with ID starting with b23494143484668d3cef863d7b72bd46f006fb0086e12259b8fb2d89659efad1 not found: ID does not exist" containerID="b23494143484668d3cef863d7b72bd46f006fb0086e12259b8fb2d89659efad1" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.848216 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b23494143484668d3cef863d7b72bd46f006fb0086e12259b8fb2d89659efad1"} err="failed to get container status \"b23494143484668d3cef863d7b72bd46f006fb0086e12259b8fb2d89659efad1\": rpc error: code = NotFound desc = could not find container \"b23494143484668d3cef863d7b72bd46f006fb0086e12259b8fb2d89659efad1\": container with ID starting with b23494143484668d3cef863d7b72bd46f006fb0086e12259b8fb2d89659efad1 not found: ID does not exist" Jun 06 09:25:53 crc kubenswrapper[4911]: I0606 09:25:53.956290 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0da2e712-e087-4f30-914b-3ee685e54ee6" path="/var/lib/kubelet/pods/0da2e712-e087-4f30-914b-3ee685e54ee6/volumes" Jun 06 09:25:54 crc kubenswrapper[4911]: I0606 09:25:54.300172 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:25:54 crc kubenswrapper[4911]: I0606 09:25:54.300294 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:25:54 crc kubenswrapper[4911]: I0606 09:25:54.300376 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:25:54 crc kubenswrapper[4911]: I0606 09:25:54.301419 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"00a6b7c957b04d6217e597e04550f48b111aa26d7d1dc819e0c3ab94dfcdb9d6"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 09:25:54 crc kubenswrapper[4911]: I0606 09:25:54.301511 4911 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://00a6b7c957b04d6217e597e04550f48b111aa26d7d1dc819e0c3ab94dfcdb9d6" gracePeriod=600 Jun 06 09:25:54 crc kubenswrapper[4911]: I0606 09:25:54.784795 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="00a6b7c957b04d6217e597e04550f48b111aa26d7d1dc819e0c3ab94dfcdb9d6" exitCode=0 Jun 06 09:25:54 crc kubenswrapper[4911]: I0606 09:25:54.784869 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"00a6b7c957b04d6217e597e04550f48b111aa26d7d1dc819e0c3ab94dfcdb9d6"} Jun 06 09:25:54 crc kubenswrapper[4911]: I0606 09:25:54.785841 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"7e9cf2d10460184da893979bc2c9b0439be07aa529d8fb6fe4706920d07685c7"} Jun 06 09:25:54 crc kubenswrapper[4911]: I0606 09:25:54.785874 4911 scope.go:117] "RemoveContainer" containerID="16351863e8c8c00f3cd092f5ca35626763ec65854e86cd76f5f564cdc9f7a3ca" Jun 06 09:25:58 crc kubenswrapper[4911]: I0606 09:25:58.411792 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-47p8t" Jun 06 09:25:58 crc kubenswrapper[4911]: I0606 09:25:58.784788 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:58 crc kubenswrapper[4911]: I0606 09:25:58.784868 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:58 crc kubenswrapper[4911]: I0606 09:25:58.790884 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:58 crc kubenswrapper[4911]: I0606 09:25:58.820028 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7f68688b7f-lmlkr" Jun 06 09:25:58 crc kubenswrapper[4911]: I0606 09:25:58.871298 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-4dcwp"] Jun 06 09:26:01 crc kubenswrapper[4911]: I0606 09:26:01.940127 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-jpnpr"] Jun 06 09:26:01 crc kubenswrapper[4911]: E0606 09:26:01.940901 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0da2e712-e087-4f30-914b-3ee685e54ee6" containerName="extract-utilities" Jun 06 09:26:01 crc kubenswrapper[4911]: I0606 09:26:01.940914 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0da2e712-e087-4f30-914b-3ee685e54ee6" containerName="extract-utilities" Jun 06 09:26:01 crc kubenswrapper[4911]: E0606 09:26:01.940924 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0da2e712-e087-4f30-914b-3ee685e54ee6" containerName="registry-server" Jun 06 09:26:01 crc kubenswrapper[4911]: I0606 09:26:01.940931 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0da2e712-e087-4f30-914b-3ee685e54ee6" containerName="registry-server" Jun 06 09:26:01 crc kubenswrapper[4911]: E0606 09:26:01.940946 4911 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="0da2e712-e087-4f30-914b-3ee685e54ee6" containerName="extract-content" Jun 06 09:26:01 crc kubenswrapper[4911]: I0606 09:26:01.940952 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0da2e712-e087-4f30-914b-3ee685e54ee6" containerName="extract-content" Jun 06 09:26:01 crc kubenswrapper[4911]: I0606 09:26:01.941125 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0da2e712-e087-4f30-914b-3ee685e54ee6" containerName="registry-server" Jun 06 09:26:01 crc kubenswrapper[4911]: I0606 09:26:01.941662 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-jpnpr" Jun 06 09:26:02 crc kubenswrapper[4911]: I0606 09:26:02.008205 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjvz7\" (UniqueName: \"kubernetes.io/projected/ea0991c2-65d1-477d-9058-14a67d39ea74-kube-api-access-hjvz7\") pod \"crc-debug-jpnpr\" (UID: \"ea0991c2-65d1-477d-9058-14a67d39ea74\") " pod="openstack/crc-debug-jpnpr" Jun 06 09:26:02 crc kubenswrapper[4911]: I0606 09:26:02.008343 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ea0991c2-65d1-477d-9058-14a67d39ea74-host\") pod \"crc-debug-jpnpr\" (UID: \"ea0991c2-65d1-477d-9058-14a67d39ea74\") " pod="openstack/crc-debug-jpnpr" Jun 06 09:26:02 crc kubenswrapper[4911]: I0606 09:26:02.109159 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ea0991c2-65d1-477d-9058-14a67d39ea74-host\") pod \"crc-debug-jpnpr\" (UID: \"ea0991c2-65d1-477d-9058-14a67d39ea74\") " pod="openstack/crc-debug-jpnpr" Jun 06 09:26:02 crc kubenswrapper[4911]: I0606 09:26:02.109291 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjvz7\" (UniqueName: \"kubernetes.io/projected/ea0991c2-65d1-477d-9058-14a67d39ea74-kube-api-access-hjvz7\") pod \"crc-debug-jpnpr\" (UID: \"ea0991c2-65d1-477d-9058-14a67d39ea74\") " pod="openstack/crc-debug-jpnpr" Jun 06 09:26:02 crc kubenswrapper[4911]: I0606 09:26:02.109301 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ea0991c2-65d1-477d-9058-14a67d39ea74-host\") pod \"crc-debug-jpnpr\" (UID: \"ea0991c2-65d1-477d-9058-14a67d39ea74\") " pod="openstack/crc-debug-jpnpr" Jun 06 09:26:02 crc kubenswrapper[4911]: I0606 09:26:02.131495 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjvz7\" (UniqueName: \"kubernetes.io/projected/ea0991c2-65d1-477d-9058-14a67d39ea74-kube-api-access-hjvz7\") pod \"crc-debug-jpnpr\" (UID: \"ea0991c2-65d1-477d-9058-14a67d39ea74\") " pod="openstack/crc-debug-jpnpr" Jun 06 09:26:02 crc kubenswrapper[4911]: I0606 09:26:02.262930 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-jpnpr" Jun 06 09:26:02 crc kubenswrapper[4911]: W0606 09:26:02.288654 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea0991c2_65d1_477d_9058_14a67d39ea74.slice/crio-959ed8390c28088d7ab232514043cd2890d9416114e3ce764d76f9a41739216c WatchSource:0}: Error finding container 959ed8390c28088d7ab232514043cd2890d9416114e3ce764d76f9a41739216c: Status 404 returned error can't find the container with id 959ed8390c28088d7ab232514043cd2890d9416114e3ce764d76f9a41739216c Jun 06 09:26:02 crc kubenswrapper[4911]: I0606 09:26:02.848974 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-jpnpr" event={"ID":"ea0991c2-65d1-477d-9058-14a67d39ea74","Type":"ContainerStarted","Data":"19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61"} Jun 06 09:26:02 crc kubenswrapper[4911]: I0606 09:26:02.849532 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-jpnpr" event={"ID":"ea0991c2-65d1-477d-9058-14a67d39ea74","Type":"ContainerStarted","Data":"959ed8390c28088d7ab232514043cd2890d9416114e3ce764d76f9a41739216c"} Jun 06 09:26:02 crc kubenswrapper[4911]: I0606 09:26:02.866940 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-jpnpr" podStartSLOduration=1.866916377 podStartE2EDuration="1.866916377s" podCreationTimestamp="2025-06-06 09:26:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:26:02.863595121 +0000 UTC m=+774.139020664" watchObservedRunningTime="2025-06-06 09:26:02.866916377 +0000 UTC m=+774.142341910" Jun 06 09:26:08 crc kubenswrapper[4911]: I0606 09:26:08.374890 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-79c49d6bf4-kc7jr" Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.635603 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-jpnpr"] Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.636635 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-jpnpr" podUID="ea0991c2-65d1-477d-9058-14a67d39ea74" containerName="container-00" containerID="cri-o://19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61" gracePeriod=2 Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.638822 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-jpnpr"] Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.740540 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-jpnpr" Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.863559 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjvz7\" (UniqueName: \"kubernetes.io/projected/ea0991c2-65d1-477d-9058-14a67d39ea74-kube-api-access-hjvz7\") pod \"ea0991c2-65d1-477d-9058-14a67d39ea74\" (UID: \"ea0991c2-65d1-477d-9058-14a67d39ea74\") " Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.863674 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ea0991c2-65d1-477d-9058-14a67d39ea74-host\") pod \"ea0991c2-65d1-477d-9058-14a67d39ea74\" (UID: \"ea0991c2-65d1-477d-9058-14a67d39ea74\") " Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.863797 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ea0991c2-65d1-477d-9058-14a67d39ea74-host" (OuterVolumeSpecName: "host") pod "ea0991c2-65d1-477d-9058-14a67d39ea74" (UID: "ea0991c2-65d1-477d-9058-14a67d39ea74"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.863954 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ea0991c2-65d1-477d-9058-14a67d39ea74-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.868639 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea0991c2-65d1-477d-9058-14a67d39ea74-kube-api-access-hjvz7" (OuterVolumeSpecName: "kube-api-access-hjvz7") pod "ea0991c2-65d1-477d-9058-14a67d39ea74" (UID: "ea0991c2-65d1-477d-9058-14a67d39ea74"). InnerVolumeSpecName "kube-api-access-hjvz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.905386 4911 generic.go:334] "Generic (PLEG): container finished" podID="ea0991c2-65d1-477d-9058-14a67d39ea74" containerID="19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61" exitCode=0 Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.905442 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-jpnpr" Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.905515 4911 scope.go:117] "RemoveContainer" containerID="19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61" Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.924497 4911 scope.go:117] "RemoveContainer" containerID="19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61" Jun 06 09:26:12 crc kubenswrapper[4911]: E0606 09:26:12.925165 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61\": container with ID starting with 19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61 not found: ID does not exist" containerID="19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61" Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.925219 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61"} err="failed to get container status \"19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61\": rpc error: code = NotFound desc = could not find container \"19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61\": container with ID starting with 19e4a4ee37715489a6e02b945b4340c6e7758cd1fe96bb1cc7d2f3251148fc61 not found: ID does not exist" Jun 06 09:26:12 crc kubenswrapper[4911]: I0606 09:26:12.965653 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjvz7\" (UniqueName: \"kubernetes.io/projected/ea0991c2-65d1-477d-9058-14a67d39ea74-kube-api-access-hjvz7\") on node \"crc\" DevicePath \"\"" Jun 06 09:26:13 crc kubenswrapper[4911]: I0606 09:26:13.955142 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea0991c2-65d1-477d-9058-14a67d39ea74" path="/var/lib/kubelet/pods/ea0991c2-65d1-477d-9058-14a67d39ea74/volumes" Jun 06 09:26:23 crc kubenswrapper[4911]: I0606 09:26:23.913188 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-4dcwp" podUID="69127e92-f707-4b41-a690-9fd917998557" containerName="console" containerID="cri-o://ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a" gracePeriod=15 Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.325848 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-4dcwp_69127e92-f707-4b41-a690-9fd917998557/console/0.log" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.326211 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.520933 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-trusted-ca-bundle\") pod \"69127e92-f707-4b41-a690-9fd917998557\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.521441 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-service-ca\") pod \"69127e92-f707-4b41-a690-9fd917998557\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.521563 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-oauth-config\") pod \"69127e92-f707-4b41-a690-9fd917998557\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.521586 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-console-config\") pod \"69127e92-f707-4b41-a690-9fd917998557\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.521632 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-serving-cert\") pod \"69127e92-f707-4b41-a690-9fd917998557\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.521728 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-oauth-serving-cert\") pod \"69127e92-f707-4b41-a690-9fd917998557\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.521748 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7zx8\" (UniqueName: \"kubernetes.io/projected/69127e92-f707-4b41-a690-9fd917998557-kube-api-access-p7zx8\") pod \"69127e92-f707-4b41-a690-9fd917998557\" (UID: \"69127e92-f707-4b41-a690-9fd917998557\") " Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.522071 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "69127e92-f707-4b41-a690-9fd917998557" (UID: "69127e92-f707-4b41-a690-9fd917998557"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.522114 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-service-ca" (OuterVolumeSpecName: "service-ca") pod "69127e92-f707-4b41-a690-9fd917998557" (UID: "69127e92-f707-4b41-a690-9fd917998557"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.522312 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-console-config" (OuterVolumeSpecName: "console-config") pod "69127e92-f707-4b41-a690-9fd917998557" (UID: "69127e92-f707-4b41-a690-9fd917998557"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.522752 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "69127e92-f707-4b41-a690-9fd917998557" (UID: "69127e92-f707-4b41-a690-9fd917998557"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.523239 4911 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.523278 4911 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.523290 4911 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-service-ca\") on node \"crc\" DevicePath \"\"" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.523317 4911 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/69127e92-f707-4b41-a690-9fd917998557-console-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.527054 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "69127e92-f707-4b41-a690-9fd917998557" (UID: "69127e92-f707-4b41-a690-9fd917998557"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.527546 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69127e92-f707-4b41-a690-9fd917998557-kube-api-access-p7zx8" (OuterVolumeSpecName: "kube-api-access-p7zx8") pod "69127e92-f707-4b41-a690-9fd917998557" (UID: "69127e92-f707-4b41-a690-9fd917998557"). InnerVolumeSpecName "kube-api-access-p7zx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.532321 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "69127e92-f707-4b41-a690-9fd917998557" (UID: "69127e92-f707-4b41-a690-9fd917998557"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.625015 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7zx8\" (UniqueName: \"kubernetes.io/projected/69127e92-f707-4b41-a690-9fd917998557-kube-api-access-p7zx8\") on node \"crc\" DevicePath \"\"" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.625062 4911 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.625075 4911 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/69127e92-f707-4b41-a690-9fd917998557-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.990751 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-4dcwp_69127e92-f707-4b41-a690-9fd917998557/console/0.log" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.991044 4911 generic.go:334] "Generic (PLEG): container finished" podID="69127e92-f707-4b41-a690-9fd917998557" containerID="ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a" exitCode=2 Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.991278 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-4dcwp" event={"ID":"69127e92-f707-4b41-a690-9fd917998557","Type":"ContainerDied","Data":"ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a"} Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.991312 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-4dcwp" event={"ID":"69127e92-f707-4b41-a690-9fd917998557","Type":"ContainerDied","Data":"6b4d55365de2ddd5d4f4f944202c0e141b3bb15a2f673d0894dd997aca5408fd"} Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.991330 4911 scope.go:117] "RemoveContainer" containerID="ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a" Jun 06 09:26:24 crc kubenswrapper[4911]: I0606 09:26:24.991330 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-4dcwp" Jun 06 09:26:25 crc kubenswrapper[4911]: I0606 09:26:25.020886 4911 scope.go:117] "RemoveContainer" containerID="ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a" Jun 06 09:26:25 crc kubenswrapper[4911]: E0606 09:26:25.021772 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a\": container with ID starting with ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a not found: ID does not exist" containerID="ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a" Jun 06 09:26:25 crc kubenswrapper[4911]: I0606 09:26:25.021817 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a"} err="failed to get container status \"ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a\": rpc error: code = NotFound desc = could not find container \"ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a\": container with ID starting with ae2a66abc3dd69f2c91c6b97162d9060b91d8eea1c6538ecbb698dd65813667a not found: ID does not exist" Jun 06 09:26:25 crc kubenswrapper[4911]: I0606 09:26:25.021867 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-4dcwp"] Jun 06 09:26:25 crc kubenswrapper[4911]: I0606 09:26:25.025853 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-4dcwp"] Jun 06 09:26:25 crc kubenswrapper[4911]: I0606 09:26:25.959970 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69127e92-f707-4b41-a690-9fd917998557" path="/var/lib/kubelet/pods/69127e92-f707-4b41-a690-9fd917998557/volumes" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.288844 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-nddcs"] Jun 06 09:26:34 crc kubenswrapper[4911]: E0606 09:26:34.289650 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69127e92-f707-4b41-a690-9fd917998557" containerName="console" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.289662 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="69127e92-f707-4b41-a690-9fd917998557" containerName="console" Jun 06 09:26:34 crc kubenswrapper[4911]: E0606 09:26:34.289673 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea0991c2-65d1-477d-9058-14a67d39ea74" containerName="container-00" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.289680 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea0991c2-65d1-477d-9058-14a67d39ea74" containerName="container-00" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.289782 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea0991c2-65d1-477d-9058-14a67d39ea74" containerName="container-00" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.289794 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="69127e92-f707-4b41-a690-9fd917998557" containerName="console" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.290277 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-nddcs" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.292399 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.292436 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-p2jmk" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.296455 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.299696 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nddcs"] Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.469787 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z96gc\" (UniqueName: \"kubernetes.io/projected/9cbd6684-5a9e-478d-8558-cc32cb40d9bd-kube-api-access-z96gc\") pod \"openstack-operator-index-nddcs\" (UID: \"9cbd6684-5a9e-478d-8558-cc32cb40d9bd\") " pod="openstack-operators/openstack-operator-index-nddcs" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.571145 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z96gc\" (UniqueName: \"kubernetes.io/projected/9cbd6684-5a9e-478d-8558-cc32cb40d9bd-kube-api-access-z96gc\") pod \"openstack-operator-index-nddcs\" (UID: \"9cbd6684-5a9e-478d-8558-cc32cb40d9bd\") " pod="openstack-operators/openstack-operator-index-nddcs" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.591375 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z96gc\" (UniqueName: \"kubernetes.io/projected/9cbd6684-5a9e-478d-8558-cc32cb40d9bd-kube-api-access-z96gc\") pod \"openstack-operator-index-nddcs\" (UID: \"9cbd6684-5a9e-478d-8558-cc32cb40d9bd\") " pod="openstack-operators/openstack-operator-index-nddcs" Jun 06 09:26:34 crc kubenswrapper[4911]: I0606 09:26:34.621230 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-nddcs" Jun 06 09:26:35 crc kubenswrapper[4911]: I0606 09:26:35.032279 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nddcs"] Jun 06 09:26:35 crc kubenswrapper[4911]: I0606 09:26:35.055478 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nddcs" event={"ID":"9cbd6684-5a9e-478d-8558-cc32cb40d9bd","Type":"ContainerStarted","Data":"816377df832646e03fca03d61df1c9dd0086fe550a0c1bdf2fe104f156022b92"} Jun 06 09:26:37 crc kubenswrapper[4911]: I0606 09:26:37.072338 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nddcs" event={"ID":"9cbd6684-5a9e-478d-8558-cc32cb40d9bd","Type":"ContainerStarted","Data":"e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f"} Jun 06 09:26:37 crc kubenswrapper[4911]: I0606 09:26:37.091728 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-nddcs" podStartSLOduration=1.22088298 podStartE2EDuration="3.091711066s" podCreationTimestamp="2025-06-06 09:26:34 +0000 UTC" firstStartedPulling="2025-06-06 09:26:35.039010727 +0000 UTC m=+806.314436270" lastFinishedPulling="2025-06-06 09:26:36.909838813 +0000 UTC m=+808.185264356" observedRunningTime="2025-06-06 09:26:37.08996022 +0000 UTC m=+808.365385773" watchObservedRunningTime="2025-06-06 09:26:37.091711066 +0000 UTC m=+808.367136609" Jun 06 09:26:37 crc kubenswrapper[4911]: I0606 09:26:37.466994 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-nddcs"] Jun 06 09:26:38 crc kubenswrapper[4911]: I0606 09:26:38.072450 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-lhvml"] Jun 06 09:26:38 crc kubenswrapper[4911]: I0606 09:26:38.073415 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-lhvml" Jun 06 09:26:38 crc kubenswrapper[4911]: I0606 09:26:38.083280 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-lhvml"] Jun 06 09:26:38 crc kubenswrapper[4911]: I0606 09:26:38.217907 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpdxx\" (UniqueName: \"kubernetes.io/projected/f0aa4dae-1e09-43b9-8b7e-380ab59d1d2e-kube-api-access-jpdxx\") pod \"openstack-operator-index-lhvml\" (UID: \"f0aa4dae-1e09-43b9-8b7e-380ab59d1d2e\") " pod="openstack-operators/openstack-operator-index-lhvml" Jun 06 09:26:38 crc kubenswrapper[4911]: I0606 09:26:38.319653 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpdxx\" (UniqueName: \"kubernetes.io/projected/f0aa4dae-1e09-43b9-8b7e-380ab59d1d2e-kube-api-access-jpdxx\") pod \"openstack-operator-index-lhvml\" (UID: \"f0aa4dae-1e09-43b9-8b7e-380ab59d1d2e\") " pod="openstack-operators/openstack-operator-index-lhvml" Jun 06 09:26:38 crc kubenswrapper[4911]: I0606 09:26:38.340966 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpdxx\" (UniqueName: \"kubernetes.io/projected/f0aa4dae-1e09-43b9-8b7e-380ab59d1d2e-kube-api-access-jpdxx\") pod \"openstack-operator-index-lhvml\" (UID: \"f0aa4dae-1e09-43b9-8b7e-380ab59d1d2e\") " pod="openstack-operators/openstack-operator-index-lhvml" Jun 06 09:26:38 crc kubenswrapper[4911]: I0606 09:26:38.392797 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-lhvml" Jun 06 09:26:38 crc kubenswrapper[4911]: I0606 09:26:38.782181 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-lhvml"] Jun 06 09:26:38 crc kubenswrapper[4911]: W0606 09:26:38.790988 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0aa4dae_1e09_43b9_8b7e_380ab59d1d2e.slice/crio-5be6d900fc8651138f48842df77a26a5e94dac7a697062371d8bb2ca9d88412d WatchSource:0}: Error finding container 5be6d900fc8651138f48842df77a26a5e94dac7a697062371d8bb2ca9d88412d: Status 404 returned error can't find the container with id 5be6d900fc8651138f48842df77a26a5e94dac7a697062371d8bb2ca9d88412d Jun 06 09:26:39 crc kubenswrapper[4911]: I0606 09:26:39.086948 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-lhvml" event={"ID":"f0aa4dae-1e09-43b9-8b7e-380ab59d1d2e","Type":"ContainerStarted","Data":"5be6d900fc8651138f48842df77a26a5e94dac7a697062371d8bb2ca9d88412d"} Jun 06 09:26:39 crc kubenswrapper[4911]: I0606 09:26:39.087083 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-nddcs" podUID="9cbd6684-5a9e-478d-8558-cc32cb40d9bd" containerName="registry-server" containerID="cri-o://e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f" gracePeriod=2 Jun 06 09:26:39 crc kubenswrapper[4911]: I0606 09:26:39.486553 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-nddcs" Jun 06 09:26:39 crc kubenswrapper[4911]: I0606 09:26:39.637395 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z96gc\" (UniqueName: \"kubernetes.io/projected/9cbd6684-5a9e-478d-8558-cc32cb40d9bd-kube-api-access-z96gc\") pod \"9cbd6684-5a9e-478d-8558-cc32cb40d9bd\" (UID: \"9cbd6684-5a9e-478d-8558-cc32cb40d9bd\") " Jun 06 09:26:39 crc kubenswrapper[4911]: I0606 09:26:39.643140 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cbd6684-5a9e-478d-8558-cc32cb40d9bd-kube-api-access-z96gc" (OuterVolumeSpecName: "kube-api-access-z96gc") pod "9cbd6684-5a9e-478d-8558-cc32cb40d9bd" (UID: "9cbd6684-5a9e-478d-8558-cc32cb40d9bd"). InnerVolumeSpecName "kube-api-access-z96gc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:26:39 crc kubenswrapper[4911]: I0606 09:26:39.739018 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z96gc\" (UniqueName: \"kubernetes.io/projected/9cbd6684-5a9e-478d-8558-cc32cb40d9bd-kube-api-access-z96gc\") on node \"crc\" DevicePath \"\"" Jun 06 09:26:40 crc kubenswrapper[4911]: I0606 09:26:40.097832 4911 generic.go:334] "Generic (PLEG): container finished" podID="9cbd6684-5a9e-478d-8558-cc32cb40d9bd" containerID="e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f" exitCode=0 Jun 06 09:26:40 crc kubenswrapper[4911]: I0606 09:26:40.097911 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-nddcs" Jun 06 09:26:40 crc kubenswrapper[4911]: I0606 09:26:40.097915 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nddcs" event={"ID":"9cbd6684-5a9e-478d-8558-cc32cb40d9bd","Type":"ContainerDied","Data":"e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f"} Jun 06 09:26:40 crc kubenswrapper[4911]: I0606 09:26:40.098493 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nddcs" event={"ID":"9cbd6684-5a9e-478d-8558-cc32cb40d9bd","Type":"ContainerDied","Data":"816377df832646e03fca03d61df1c9dd0086fe550a0c1bdf2fe104f156022b92"} Jun 06 09:26:40 crc kubenswrapper[4911]: I0606 09:26:40.098520 4911 scope.go:117] "RemoveContainer" containerID="e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f" Jun 06 09:26:40 crc kubenswrapper[4911]: I0606 09:26:40.101060 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-lhvml" event={"ID":"f0aa4dae-1e09-43b9-8b7e-380ab59d1d2e","Type":"ContainerStarted","Data":"658ad574f7e4d1a1a291b9f63cac643bee9ec08f1fbc5015ea9c8f6b75d9c814"} Jun 06 09:26:40 crc kubenswrapper[4911]: I0606 09:26:40.120555 4911 scope.go:117] "RemoveContainer" containerID="e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f" Jun 06 09:26:40 crc kubenswrapper[4911]: E0606 09:26:40.121441 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f\": container with ID starting with e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f not found: ID does not exist" containerID="e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f" Jun 06 09:26:40 crc kubenswrapper[4911]: I0606 09:26:40.121571 4911 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f"} err="failed to get container status \"e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f\": rpc error: code = NotFound desc = could not find container \"e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f\": container with ID starting with e2a23b57b14e6e259fd8faa04870d1204e9f824a9f304975ecfc702435b10c7f not found: ID does not exist" Jun 06 09:26:40 crc kubenswrapper[4911]: I0606 09:26:40.122246 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-lhvml" podStartSLOduration=1.812707853 podStartE2EDuration="2.122227167s" podCreationTimestamp="2025-06-06 09:26:38 +0000 UTC" firstStartedPulling="2025-06-06 09:26:38.795131661 +0000 UTC m=+810.070557204" lastFinishedPulling="2025-06-06 09:26:39.104650975 +0000 UTC m=+810.380076518" observedRunningTime="2025-06-06 09:26:40.118817679 +0000 UTC m=+811.394243232" watchObservedRunningTime="2025-06-06 09:26:40.122227167 +0000 UTC m=+811.397652710" Jun 06 09:26:40 crc kubenswrapper[4911]: I0606 09:26:40.136629 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-nddcs"] Jun 06 09:26:40 crc kubenswrapper[4911]: I0606 09:26:40.142073 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-nddcs"] Jun 06 09:26:41 crc kubenswrapper[4911]: I0606 09:26:41.955320 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cbd6684-5a9e-478d-8558-cc32cb40d9bd" path="/var/lib/kubelet/pods/9cbd6684-5a9e-478d-8558-cc32cb40d9bd/volumes" Jun 06 09:26:48 crc kubenswrapper[4911]: I0606 09:26:48.393387 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-lhvml" Jun 06 09:26:48 crc kubenswrapper[4911]: I0606 09:26:48.393985 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-lhvml" Jun 06 09:26:48 crc kubenswrapper[4911]: I0606 09:26:48.421262 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-lhvml" Jun 06 09:26:49 crc kubenswrapper[4911]: I0606 09:26:49.178320 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-lhvml" Jun 06 09:26:56 crc kubenswrapper[4911]: I0606 09:26:56.911199 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw"] Jun 06 09:26:56 crc kubenswrapper[4911]: E0606 09:26:56.911942 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cbd6684-5a9e-478d-8558-cc32cb40d9bd" containerName="registry-server" Jun 06 09:26:56 crc kubenswrapper[4911]: I0606 09:26:56.911957 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cbd6684-5a9e-478d-8558-cc32cb40d9bd" containerName="registry-server" Jun 06 09:26:56 crc kubenswrapper[4911]: I0606 09:26:56.912125 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cbd6684-5a9e-478d-8558-cc32cb40d9bd" containerName="registry-server" Jun 06 09:26:56 crc kubenswrapper[4911]: I0606 09:26:56.913087 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:26:56 crc kubenswrapper[4911]: I0606 09:26:56.918404 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw"] Jun 06 09:26:56 crc kubenswrapper[4911]: I0606 09:26:56.919002 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-4rkqw" Jun 06 09:26:56 crc kubenswrapper[4911]: I0606 09:26:56.995017 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-util\") pod \"3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:26:56 crc kubenswrapper[4911]: I0606 09:26:56.995529 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7tmx\" (UniqueName: \"kubernetes.io/projected/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-kube-api-access-n7tmx\") pod \"3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:26:56 crc kubenswrapper[4911]: I0606 09:26:56.995581 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-bundle\") pod \"3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:26:57 crc kubenswrapper[4911]: I0606 09:26:57.097353 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-util\") pod \"3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:26:57 crc kubenswrapper[4911]: I0606 09:26:57.097431 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7tmx\" (UniqueName: \"kubernetes.io/projected/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-kube-api-access-n7tmx\") pod \"3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:26:57 crc kubenswrapper[4911]: I0606 09:26:57.097515 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-bundle\") pod \"3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:26:57 crc kubenswrapper[4911]: I0606 09:26:57.098041 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-util\") pod \"3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:26:57 crc kubenswrapper[4911]: I0606 09:26:57.098063 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-bundle\") pod \"3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:26:57 crc kubenswrapper[4911]: I0606 09:26:57.118948 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7tmx\" (UniqueName: \"kubernetes.io/projected/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-kube-api-access-n7tmx\") pod \"3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:26:57 crc kubenswrapper[4911]: I0606 09:26:57.242594 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:26:57 crc kubenswrapper[4911]: I0606 09:26:57.435020 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw"] Jun 06 09:26:58 crc kubenswrapper[4911]: I0606 09:26:58.210312 4911 generic.go:334] "Generic (PLEG): container finished" podID="8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" containerID="c79caf56d288a696316a61686afbe93cf72d9b0621fa278f3b0b7556d170bfa7" exitCode=0 Jun 06 09:26:58 crc kubenswrapper[4911]: I0606 09:26:58.210361 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" event={"ID":"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5","Type":"ContainerDied","Data":"c79caf56d288a696316a61686afbe93cf72d9b0621fa278f3b0b7556d170bfa7"} Jun 06 09:26:58 crc kubenswrapper[4911]: I0606 09:26:58.210388 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" event={"ID":"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5","Type":"ContainerStarted","Data":"ce8c081a67756a4cacf2572346e6f813dc8eb49ddba4731b196df9d466297c5b"} Jun 06 09:26:58 crc kubenswrapper[4911]: I0606 09:26:58.213012 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 09:26:59 crc kubenswrapper[4911]: I0606 09:26:59.226260 4911 generic.go:334] "Generic (PLEG): container finished" podID="8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" containerID="58d7386d05509c10617a32755060420ed2d3efd47f1909506d2e33692911dc47" exitCode=0 Jun 06 09:26:59 crc kubenswrapper[4911]: I0606 09:26:59.226335 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" event={"ID":"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5","Type":"ContainerDied","Data":"58d7386d05509c10617a32755060420ed2d3efd47f1909506d2e33692911dc47"} Jun 06 09:27:00 crc kubenswrapper[4911]: I0606 09:27:00.237872 4911 generic.go:334] "Generic (PLEG): container finished" 
podID="8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" containerID="be95b270a6b7e5ec1f33b12381bd92494c20a2c460c9273001f41b1a27bd467a" exitCode=0 Jun 06 09:27:00 crc kubenswrapper[4911]: I0606 09:27:00.238022 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" event={"ID":"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5","Type":"ContainerDied","Data":"be95b270a6b7e5ec1f33b12381bd92494c20a2c460c9273001f41b1a27bd467a"} Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.511398 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.561892 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-bundle\") pod \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.562220 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-util\") pod \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.562242 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7tmx\" (UniqueName: \"kubernetes.io/projected/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-kube-api-access-n7tmx\") pod \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\" (UID: \"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5\") " Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.562685 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-bundle" (OuterVolumeSpecName: "bundle") pod "8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" (UID: "8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.567677 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-kube-api-access-n7tmx" (OuterVolumeSpecName: "kube-api-access-n7tmx") pod "8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" (UID: "8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5"). InnerVolumeSpecName "kube-api-access-n7tmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.575416 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-util" (OuterVolumeSpecName: "util") pod "8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" (UID: "8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.664234 4911 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-util\") on node \"crc\" DevicePath \"\"" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.664275 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7tmx\" (UniqueName: \"kubernetes.io/projected/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-kube-api-access-n7tmx\") on node \"crc\" DevicePath \"\"" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.664287 4911 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.963322 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/crc-debug-ckvs5"] Jun 06 09:27:01 crc kubenswrapper[4911]: E0606 09:27:01.963587 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" containerName="pull" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.963601 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" containerName="pull" Jun 06 09:27:01 crc kubenswrapper[4911]: E0606 09:27:01.963614 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" containerName="extract" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.963623 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" containerName="extract" Jun 06 09:27:01 crc kubenswrapper[4911]: E0606 09:27:01.963652 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" containerName="util" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.963661 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" containerName="util" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.963812 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5" containerName="extract" Jun 06 09:27:01 crc kubenswrapper[4911]: I0606 09:27:01.964284 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/crc-debug-ckvs5" Jun 06 09:27:02 crc kubenswrapper[4911]: I0606 09:27:02.069317 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4428e9c9-ac22-426f-90c0-1b9c741da641-host\") pod \"crc-debug-ckvs5\" (UID: \"4428e9c9-ac22-426f-90c0-1b9c741da641\") " pod="openstack-operators/crc-debug-ckvs5" Jun 06 09:27:02 crc kubenswrapper[4911]: I0606 09:27:02.069644 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4fvh\" (UniqueName: \"kubernetes.io/projected/4428e9c9-ac22-426f-90c0-1b9c741da641-kube-api-access-h4fvh\") pod \"crc-debug-ckvs5\" (UID: \"4428e9c9-ac22-426f-90c0-1b9c741da641\") " pod="openstack-operators/crc-debug-ckvs5" Jun 06 09:27:02 crc kubenswrapper[4911]: I0606 09:27:02.171530 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4fvh\" (UniqueName: \"kubernetes.io/projected/4428e9c9-ac22-426f-90c0-1b9c741da641-kube-api-access-h4fvh\") pod \"crc-debug-ckvs5\" (UID: \"4428e9c9-ac22-426f-90c0-1b9c741da641\") " pod="openstack-operators/crc-debug-ckvs5" Jun 06 09:27:02 crc kubenswrapper[4911]: I0606 09:27:02.171623 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4428e9c9-ac22-426f-90c0-1b9c741da641-host\") pod \"crc-debug-ckvs5\" (UID: \"4428e9c9-ac22-426f-90c0-1b9c741da641\") " pod="openstack-operators/crc-debug-ckvs5" Jun 06 09:27:02 crc kubenswrapper[4911]: I0606 09:27:02.171829 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4428e9c9-ac22-426f-90c0-1b9c741da641-host\") pod \"crc-debug-ckvs5\" (UID: \"4428e9c9-ac22-426f-90c0-1b9c741da641\") " pod="openstack-operators/crc-debug-ckvs5" Jun 06 09:27:02 crc kubenswrapper[4911]: I0606 09:27:02.190649 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4fvh\" (UniqueName: \"kubernetes.io/projected/4428e9c9-ac22-426f-90c0-1b9c741da641-kube-api-access-h4fvh\") pod \"crc-debug-ckvs5\" (UID: \"4428e9c9-ac22-426f-90c0-1b9c741da641\") " pod="openstack-operators/crc-debug-ckvs5" Jun 06 09:27:02 crc kubenswrapper[4911]: I0606 09:27:02.255640 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" event={"ID":"8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5","Type":"ContainerDied","Data":"ce8c081a67756a4cacf2572346e6f813dc8eb49ddba4731b196df9d466297c5b"} Jun 06 09:27:02 crc kubenswrapper[4911]: I0606 09:27:02.255692 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce8c081a67756a4cacf2572346e6f813dc8eb49ddba4731b196df9d466297c5b" Jun 06 09:27:02 crc kubenswrapper[4911]: I0606 09:27:02.255772 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw" Jun 06 09:27:02 crc kubenswrapper[4911]: I0606 09:27:02.284108 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/crc-debug-ckvs5" Jun 06 09:27:02 crc kubenswrapper[4911]: W0606 09:27:02.304760 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4428e9c9_ac22_426f_90c0_1b9c741da641.slice/crio-e141dad2db589d80b390cf7031e92c753af93c5fc3add074ca64f7d7b55c91e0 WatchSource:0}: Error finding container e141dad2db589d80b390cf7031e92c753af93c5fc3add074ca64f7d7b55c91e0: Status 404 returned error can't find the container with id e141dad2db589d80b390cf7031e92c753af93c5fc3add074ca64f7d7b55c91e0 Jun 06 09:27:03 crc kubenswrapper[4911]: I0606 09:27:03.264834 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/crc-debug-ckvs5" event={"ID":"4428e9c9-ac22-426f-90c0-1b9c741da641","Type":"ContainerStarted","Data":"d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9"} Jun 06 09:27:03 crc kubenswrapper[4911]: I0606 09:27:03.265179 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/crc-debug-ckvs5" event={"ID":"4428e9c9-ac22-426f-90c0-1b9c741da641","Type":"ContainerStarted","Data":"e141dad2db589d80b390cf7031e92c753af93c5fc3add074ca64f7d7b55c91e0"} Jun 06 09:27:03 crc kubenswrapper[4911]: I0606 09:27:03.280595 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/crc-debug-ckvs5" podStartSLOduration=2.280578174 podStartE2EDuration="2.280578174s" podCreationTimestamp="2025-06-06 09:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:27:03.277921986 +0000 UTC m=+834.553347529" watchObservedRunningTime="2025-06-06 09:27:03.280578174 +0000 UTC m=+834.556003717" Jun 06 09:27:09 crc kubenswrapper[4911]: I0606 09:27:09.407121 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh"] Jun 06 09:27:09 crc kubenswrapper[4911]: I0606 09:27:09.408392 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh" Jun 06 09:27:09 crc kubenswrapper[4911]: I0606 09:27:09.410585 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-vsnvn" Jun 06 09:27:09 crc kubenswrapper[4911]: I0606 09:27:09.427955 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh"] Jun 06 09:27:09 crc kubenswrapper[4911]: I0606 09:27:09.474535 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z5gk\" (UniqueName: \"kubernetes.io/projected/0003c307-6b38-4d34-a39c-8f1792405537-kube-api-access-2z5gk\") pod \"openstack-operator-controller-operator-69c87d8fd5-b85qh\" (UID: \"0003c307-6b38-4d34-a39c-8f1792405537\") " pod="openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh" Jun 06 09:27:09 crc kubenswrapper[4911]: I0606 09:27:09.575829 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z5gk\" (UniqueName: \"kubernetes.io/projected/0003c307-6b38-4d34-a39c-8f1792405537-kube-api-access-2z5gk\") pod \"openstack-operator-controller-operator-69c87d8fd5-b85qh\" (UID: \"0003c307-6b38-4d34-a39c-8f1792405537\") " pod="openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh" Jun 06 09:27:09 crc kubenswrapper[4911]: I0606 09:27:09.596344 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z5gk\" (UniqueName: \"kubernetes.io/projected/0003c307-6b38-4d34-a39c-8f1792405537-kube-api-access-2z5gk\") pod \"openstack-operator-controller-operator-69c87d8fd5-b85qh\" (UID: \"0003c307-6b38-4d34-a39c-8f1792405537\") " pod="openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh" Jun 06 09:27:09 crc kubenswrapper[4911]: I0606 09:27:09.731518 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-vsnvn" Jun 06 09:27:09 crc kubenswrapper[4911]: I0606 09:27:09.740163 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh" Jun 06 09:27:09 crc kubenswrapper[4911]: I0606 09:27:09.967318 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh"] Jun 06 09:27:10 crc kubenswrapper[4911]: I0606 09:27:10.308213 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh" event={"ID":"0003c307-6b38-4d34-a39c-8f1792405537","Type":"ContainerStarted","Data":"33dd43cb7a57b7b65e01a013c2dd3582ccb9ce0704152ce92d603b12d588409e"} Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.147576 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/crc-debug-ckvs5"] Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.148069 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/crc-debug-ckvs5" podUID="4428e9c9-ac22-426f-90c0-1b9c741da641" containerName="container-00" containerID="cri-o://d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9" gracePeriod=2 Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.152635 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/crc-debug-ckvs5"] Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.321190 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/crc-debug-ckvs5" Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.337733 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4428e9c9-ac22-426f-90c0-1b9c741da641-host\") pod \"4428e9c9-ac22-426f-90c0-1b9c741da641\" (UID: \"4428e9c9-ac22-426f-90c0-1b9c741da641\") " Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.337816 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4fvh\" (UniqueName: \"kubernetes.io/projected/4428e9c9-ac22-426f-90c0-1b9c741da641-kube-api-access-h4fvh\") pod \"4428e9c9-ac22-426f-90c0-1b9c741da641\" (UID: \"4428e9c9-ac22-426f-90c0-1b9c741da641\") " Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.338297 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4428e9c9-ac22-426f-90c0-1b9c741da641-host" (OuterVolumeSpecName: "host") pod "4428e9c9-ac22-426f-90c0-1b9c741da641" (UID: "4428e9c9-ac22-426f-90c0-1b9c741da641"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.342760 4911 generic.go:334] "Generic (PLEG): container finished" podID="4428e9c9-ac22-426f-90c0-1b9c741da641" containerID="d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9" exitCode=0 Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.342838 4911 scope.go:117] "RemoveContainer" containerID="d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9" Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.342874 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/crc-debug-ckvs5" Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.345292 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4428e9c9-ac22-426f-90c0-1b9c741da641-kube-api-access-h4fvh" (OuterVolumeSpecName: "kube-api-access-h4fvh") pod "4428e9c9-ac22-426f-90c0-1b9c741da641" (UID: "4428e9c9-ac22-426f-90c0-1b9c741da641"). InnerVolumeSpecName "kube-api-access-h4fvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.381979 4911 scope.go:117] "RemoveContainer" containerID="d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9" Jun 06 09:27:13 crc kubenswrapper[4911]: E0606 09:27:13.382546 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9\": container with ID starting with d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9 not found: ID does not exist" containerID="d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9" Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.382599 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9"} err="failed to get container status \"d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9\": rpc error: code = NotFound desc = could not find container \"d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9\": container with ID starting with d54a914d169f64cf615c74a5803d27c68df98602de6ad26e7dc95871d12b63c9 not found: ID does not exist" Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.439439 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4fvh\" (UniqueName: \"kubernetes.io/projected/4428e9c9-ac22-426f-90c0-1b9c741da641-kube-api-access-h4fvh\") on node \"crc\" DevicePath \"\"" Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.439478 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4428e9c9-ac22-426f-90c0-1b9c741da641-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:27:13 crc kubenswrapper[4911]: I0606 09:27:13.969200 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4428e9c9-ac22-426f-90c0-1b9c741da641" path="/var/lib/kubelet/pods/4428e9c9-ac22-426f-90c0-1b9c741da641/volumes" Jun 06 09:27:14 crc kubenswrapper[4911]: I0606 09:27:14.356727 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh" event={"ID":"0003c307-6b38-4d34-a39c-8f1792405537","Type":"ContainerStarted","Data":"babe3d17225f61a7e36c91b97b920629f08ccc7c13d25d62aa469be9355056a5"} Jun 06 09:27:16 crc kubenswrapper[4911]: I0606 09:27:16.372046 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh" event={"ID":"0003c307-6b38-4d34-a39c-8f1792405537","Type":"ContainerStarted","Data":"d3cb3d4b3f8f01e0cb3376302fd64ba2683b715d425e77e99b2061d681df0221"} Jun 06 09:27:16 crc kubenswrapper[4911]: I0606 09:27:16.372353 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh" Jun 06 09:27:16 crc kubenswrapper[4911]: I0606 09:27:16.398561 4911 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh" podStartSLOduration=1.509431714 podStartE2EDuration="7.39854151s" podCreationTimestamp="2025-06-06 09:27:09 +0000 UTC" firstStartedPulling="2025-06-06 09:27:09.977737257 +0000 UTC m=+841.253162800" lastFinishedPulling="2025-06-06 09:27:15.866847053 +0000 UTC m=+847.142272596" observedRunningTime="2025-06-06 09:27:16.397790481 +0000 UTC m=+847.673216034" watchObservedRunningTime="2025-06-06 09:27:16.39854151 +0000 UTC m=+847.673967053" Jun 06 09:27:19 crc kubenswrapper[4911]: I0606 09:27:19.743491 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-69c87d8fd5-b85qh" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.472366 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw"] Jun 06 09:27:52 crc kubenswrapper[4911]: E0606 09:27:52.473362 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4428e9c9-ac22-426f-90c0-1b9c741da641" containerName="container-00" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.473378 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4428e9c9-ac22-426f-90c0-1b9c741da641" containerName="container-00" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.473528 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4428e9c9-ac22-426f-90c0-1b9c741da641" containerName="container-00" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.474358 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.476300 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.477815 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-s6c65" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.480589 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.482322 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-lz7xb" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.485964 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.492137 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.502348 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-b554678df-g2vch"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.503472 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b554678df-g2vch" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.506789 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-b8wj9" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.524251 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-97b97479c-j5r77"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.526399 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-97b97479c-j5r77" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.528241 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-6564d" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.539641 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.540942 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.543633 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b554678df-g2vch"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.547153 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-f2vrh" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.556152 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-97b97479c-j5r77"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.576342 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.588658 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.590208 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.593564 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-grlfc" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.597874 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.602424 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.603824 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.609532 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-lxs58" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.609878 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.616712 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.625056 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.634139 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.639896 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-ttl79" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.643474 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.659138 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.660165 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.660805 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr645\" (UniqueName: \"kubernetes.io/projected/0631c55e-f521-4524-9881-14a20a5b280d-kube-api-access-nr645\") pod \"glance-operator-controller-manager-97b97479c-j5r77\" (UID: \"0631c55e-f521-4524-9881-14a20a5b280d\") " pod="openstack-operators/glance-operator-controller-manager-97b97479c-j5r77" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.660870 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4s52\" (UniqueName: \"kubernetes.io/projected/2ac3f341-0925-45e2-a1fe-f356cde13a14-kube-api-access-m4s52\") pod \"designate-operator-controller-manager-b554678df-g2vch\" (UID: \"2ac3f341-0925-45e2-a1fe-f356cde13a14\") " pod="openstack-operators/designate-operator-controller-manager-b554678df-g2vch" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.660944 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wj9q8\" (UniqueName: \"kubernetes.io/projected/b6cd86f9-52b4-430f-b1d6-105fe436aff6-kube-api-access-wj9q8\") pod \"heat-operator-controller-manager-5486f4b54f-klhdq\" (UID: \"b6cd86f9-52b4-430f-b1d6-105fe436aff6\") " pod="openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.660997 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grwsh\" (UniqueName: \"kubernetes.io/projected/02910366-cde2-4ad1-a276-cb4fe7c3e4c1-kube-api-access-grwsh\") pod \"cinder-operator-controller-manager-57f4dc9749-jnxvw\" (UID: \"02910366-cde2-4ad1-a276-cb4fe7c3e4c1\") " pod="openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.661034 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvkxd\" (UniqueName: \"kubernetes.io/projected/c5040ae4-3ac1-4bf6-a982-4dd494402e9f-kube-api-access-zvkxd\") pod \"barbican-operator-controller-manager-9889b4756-xv9ps\" (UID: \"c5040ae4-3ac1-4bf6-a982-4dd494402e9f\") " pod="openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.663060 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-bwcm8" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.694160 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.695440 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.700349 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-qrlzp" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.702107 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.712228 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.713263 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.717822 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-7l99b" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.729985 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.752951 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.753197 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.754302 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.756853 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-j5nhp" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.762280 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62p22\" (UniqueName: \"kubernetes.io/projected/229f54b7-db5c-48f8-9188-8ba38df574b5-kube-api-access-62p22\") pod \"infra-operator-controller-manager-5b4ccb8c4-t77qw\" (UID: \"229f54b7-db5c-48f8-9188-8ba38df574b5\") " pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.762355 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grwsh\" (UniqueName: \"kubernetes.io/projected/02910366-cde2-4ad1-a276-cb4fe7c3e4c1-kube-api-access-grwsh\") pod \"cinder-operator-controller-manager-57f4dc9749-jnxvw\" (UID: \"02910366-cde2-4ad1-a276-cb4fe7c3e4c1\") " pod="openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.762395 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvkxd\" (UniqueName: \"kubernetes.io/projected/c5040ae4-3ac1-4bf6-a982-4dd494402e9f-kube-api-access-zvkxd\") pod \"barbican-operator-controller-manager-9889b4756-xv9ps\" (UID: \"c5040ae4-3ac1-4bf6-a982-4dd494402e9f\") " pod="openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.762432 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr645\" (UniqueName: \"kubernetes.io/projected/0631c55e-f521-4524-9881-14a20a5b280d-kube-api-access-nr645\") pod \"glance-operator-controller-manager-97b97479c-j5r77\" (UID: \"0631c55e-f521-4524-9881-14a20a5b280d\") " pod="openstack-operators/glance-operator-controller-manager-97b97479c-j5r77" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.762465 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ffrp\" (UniqueName: \"kubernetes.io/projected/38945f0e-4d6b-44ba-b644-cdb391508f47-kube-api-access-6ffrp\") pod \"keystone-operator-controller-manager-5ccbd96f89-fw9ps\" (UID: \"38945f0e-4d6b-44ba-b644-cdb391508f47\") " pod="openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.762501 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2mgj\" (UniqueName: \"kubernetes.io/projected/3431e651-edaf-4b0b-b6a1-f56fbae01047-kube-api-access-k2mgj\") pod \"horizon-operator-controller-manager-7777cf768b-l4tg9\" (UID: \"3431e651-edaf-4b0b-b6a1-f56fbae01047\") " pod="openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.762531 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4s52\" (UniqueName: \"kubernetes.io/projected/2ac3f341-0925-45e2-a1fe-f356cde13a14-kube-api-access-m4s52\") pod \"designate-operator-controller-manager-b554678df-g2vch\" (UID: \"2ac3f341-0925-45e2-a1fe-f356cde13a14\") " 
pod="openstack-operators/designate-operator-controller-manager-b554678df-g2vch" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.762557 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcxl9\" (UniqueName: \"kubernetes.io/projected/639b9190-f625-453c-8ab5-d48b2140f801-kube-api-access-xcxl9\") pod \"ironic-operator-controller-manager-68f4bbb747-rsbj6\" (UID: \"639b9190-f625-453c-8ab5-d48b2140f801\") " pod="openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.762582 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/229f54b7-db5c-48f8-9188-8ba38df574b5-cert\") pod \"infra-operator-controller-manager-5b4ccb8c4-t77qw\" (UID: \"229f54b7-db5c-48f8-9188-8ba38df574b5\") " pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.762637 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wj9q8\" (UniqueName: \"kubernetes.io/projected/b6cd86f9-52b4-430f-b1d6-105fe436aff6-kube-api-access-wj9q8\") pod \"heat-operator-controller-manager-5486f4b54f-klhdq\" (UID: \"b6cd86f9-52b4-430f-b1d6-105fe436aff6\") " pod="openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.767860 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.779483 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.781450 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.784209 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.786757 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-2xq8d" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.790063 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4s52\" (UniqueName: \"kubernetes.io/projected/2ac3f341-0925-45e2-a1fe-f356cde13a14-kube-api-access-m4s52\") pod \"designate-operator-controller-manager-b554678df-g2vch\" (UID: \"2ac3f341-0925-45e2-a1fe-f356cde13a14\") " pod="openstack-operators/designate-operator-controller-manager-b554678df-g2vch" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.790065 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wj9q8\" (UniqueName: \"kubernetes.io/projected/b6cd86f9-52b4-430f-b1d6-105fe436aff6-kube-api-access-wj9q8\") pod \"heat-operator-controller-manager-5486f4b54f-klhdq\" (UID: \"b6cd86f9-52b4-430f-b1d6-105fe436aff6\") " pod="openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.790506 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr645\" (UniqueName: \"kubernetes.io/projected/0631c55e-f521-4524-9881-14a20a5b280d-kube-api-access-nr645\") pod \"glance-operator-controller-manager-97b97479c-j5r77\" (UID: \"0631c55e-f521-4524-9881-14a20a5b280d\") " pod="openstack-operators/glance-operator-controller-manager-97b97479c-j5r77" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.795027 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvkxd\" (UniqueName: \"kubernetes.io/projected/c5040ae4-3ac1-4bf6-a982-4dd494402e9f-kube-api-access-zvkxd\") pod \"barbican-operator-controller-manager-9889b4756-xv9ps\" (UID: \"c5040ae4-3ac1-4bf6-a982-4dd494402e9f\") " pod="openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.811935 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grwsh\" (UniqueName: \"kubernetes.io/projected/02910366-cde2-4ad1-a276-cb4fe7c3e4c1-kube-api-access-grwsh\") pod \"cinder-operator-controller-manager-57f4dc9749-jnxvw\" (UID: \"02910366-cde2-4ad1-a276-cb4fe7c3e4c1\") " pod="openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.820249 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.827472 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.829911 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.832869 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-hfxt4" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.845944 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b554678df-g2vch" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.856338 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-97b97479c-j5r77" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.856669 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.865023 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqgfs\" (UniqueName: \"kubernetes.io/projected/33d5bd3b-0223-4a9b-93a7-85004ae5f40a-kube-api-access-xqgfs\") pod \"mariadb-operator-controller-manager-7d4bbc7f54-rgdz4\" (UID: \"33d5bd3b-0223-4a9b-93a7-85004ae5f40a\") " pod="openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.865593 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ffrp\" (UniqueName: \"kubernetes.io/projected/38945f0e-4d6b-44ba-b644-cdb391508f47-kube-api-access-6ffrp\") pod \"keystone-operator-controller-manager-5ccbd96f89-fw9ps\" (UID: \"38945f0e-4d6b-44ba-b644-cdb391508f47\") " pod="openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.865771 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2mgj\" (UniqueName: \"kubernetes.io/projected/3431e651-edaf-4b0b-b6a1-f56fbae01047-kube-api-access-k2mgj\") pod \"horizon-operator-controller-manager-7777cf768b-l4tg9\" (UID: \"3431e651-edaf-4b0b-b6a1-f56fbae01047\") " pod="openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.866056 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxc7r\" (UniqueName: \"kubernetes.io/projected/6ae578bf-06e2-4fcd-b272-84216e832cb2-kube-api-access-pxc7r\") pod \"neutron-operator-controller-manager-5df6744645-slsxs\" (UID: \"6ae578bf-06e2-4fcd-b272-84216e832cb2\") " pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.867583 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zpmh\" (UniqueName: \"kubernetes.io/projected/8d372995-214b-497b-807e-56a813866d07-kube-api-access-7zpmh\") pod \"manila-operator-controller-manager-75b8755b74-8djvm\" (UID: \"8d372995-214b-497b-807e-56a813866d07\") " pod="openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.867743 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcxl9\" (UniqueName: 
\"kubernetes.io/projected/639b9190-f625-453c-8ab5-d48b2140f801-kube-api-access-xcxl9\") pod \"ironic-operator-controller-manager-68f4bbb747-rsbj6\" (UID: \"639b9190-f625-453c-8ab5-d48b2140f801\") " pod="openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.867787 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/229f54b7-db5c-48f8-9188-8ba38df574b5-cert\") pod \"infra-operator-controller-manager-5b4ccb8c4-t77qw\" (UID: \"229f54b7-db5c-48f8-9188-8ba38df574b5\") " pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:27:52 crc kubenswrapper[4911]: E0606 09:27:52.868513 4911 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jun 06 09:27:52 crc kubenswrapper[4911]: E0606 09:27:52.868632 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/229f54b7-db5c-48f8-9188-8ba38df574b5-cert podName:229f54b7-db5c-48f8-9188-8ba38df574b5 nodeName:}" failed. No retries permitted until 2025-06-06 09:27:53.368604639 +0000 UTC m=+884.644030182 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/229f54b7-db5c-48f8-9188-8ba38df574b5-cert") pod "infra-operator-controller-manager-5b4ccb8c4-t77qw" (UID: "229f54b7-db5c-48f8-9188-8ba38df574b5") : secret "infra-operator-webhook-server-cert" not found Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.886804 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62p22\" (UniqueName: \"kubernetes.io/projected/229f54b7-db5c-48f8-9188-8ba38df574b5-kube-api-access-62p22\") pod \"infra-operator-controller-manager-5b4ccb8c4-t77qw\" (UID: \"229f54b7-db5c-48f8-9188-8ba38df574b5\") " pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.887800 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.888359 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.892369 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.897881 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2mgj\" (UniqueName: \"kubernetes.io/projected/3431e651-edaf-4b0b-b6a1-f56fbae01047-kube-api-access-k2mgj\") pod \"horizon-operator-controller-manager-7777cf768b-l4tg9\" (UID: \"3431e651-edaf-4b0b-b6a1-f56fbae01047\") " pod="openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.899512 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ffrp\" (UniqueName: \"kubernetes.io/projected/38945f0e-4d6b-44ba-b644-cdb391508f47-kube-api-access-6ffrp\") pod \"keystone-operator-controller-manager-5ccbd96f89-fw9ps\" (UID: \"38945f0e-4d6b-44ba-b644-cdb391508f47\") " pod="openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.906932 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.913271 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-skhd8" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.915554 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62p22\" (UniqueName: \"kubernetes.io/projected/229f54b7-db5c-48f8-9188-8ba38df574b5-kube-api-access-62p22\") pod \"infra-operator-controller-manager-5b4ccb8c4-t77qw\" (UID: \"229f54b7-db5c-48f8-9188-8ba38df574b5\") " pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.915706 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.915817 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcxl9\" (UniqueName: \"kubernetes.io/projected/639b9190-f625-453c-8ab5-d48b2140f801-kube-api-access-xcxl9\") pod \"ironic-operator-controller-manager-68f4bbb747-rsbj6\" (UID: \"639b9190-f625-453c-8ab5-d48b2140f801\") " pod="openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.922882 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.924001 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.925184 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.927461 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-jvv5l" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.929580 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-58f798889d-4xw87"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.930839 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-58f798889d-4xw87" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.934944 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-kv5gc" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.936016 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.943397 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.948217 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.952382 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-zkqwz" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.952614 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-58f798889d-4xw87"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.959206 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc"] Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.964957 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.982300 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.989356 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jstg4\" (UniqueName: \"kubernetes.io/projected/06051762-e014-4c42-9e66-e34da6b618c8-kube-api-access-jstg4\") pod \"nova-operator-controller-manager-664db87fd8-hfzdg\" (UID: \"06051762-e014-4c42-9e66-e34da6b618c8\") " pod="openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.989445 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shr8l\" (UniqueName: \"kubernetes.io/projected/39ddd5bc-de82-4e82-9744-b4d5a64e052f-kube-api-access-shr8l\") pod \"octavia-operator-controller-manager-857f9d6b88-ggz4c\" (UID: \"39ddd5bc-de82-4e82-9744-b4d5a64e052f\") " pod="openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.989510 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqgfs\" (UniqueName: \"kubernetes.io/projected/33d5bd3b-0223-4a9b-93a7-85004ae5f40a-kube-api-access-xqgfs\") pod \"mariadb-operator-controller-manager-7d4bbc7f54-rgdz4\" (UID: \"33d5bd3b-0223-4a9b-93a7-85004ae5f40a\") " pod="openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.989553 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxc7r\" (UniqueName: \"kubernetes.io/projected/6ae578bf-06e2-4fcd-b272-84216e832cb2-kube-api-access-pxc7r\") pod \"neutron-operator-controller-manager-5df6744645-slsxs\" (UID: \"6ae578bf-06e2-4fcd-b272-84216e832cb2\") " pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" Jun 06 09:27:52 crc kubenswrapper[4911]: I0606 09:27:52.989585 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zpmh\" (UniqueName: \"kubernetes.io/projected/8d372995-214b-497b-807e-56a813866d07-kube-api-access-7zpmh\") pod \"manila-operator-controller-manager-75b8755b74-8djvm\" (UID: \"8d372995-214b-497b-807e-56a813866d07\") " pod="openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.031206 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zpmh\" (UniqueName: \"kubernetes.io/projected/8d372995-214b-497b-807e-56a813866d07-kube-api-access-7zpmh\") pod \"manila-operator-controller-manager-75b8755b74-8djvm\" (UID: \"8d372995-214b-497b-807e-56a813866d07\") " pod="openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.035454 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.040790 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxc7r\" (UniqueName: \"kubernetes.io/projected/6ae578bf-06e2-4fcd-b272-84216e832cb2-kube-api-access-pxc7r\") pod \"neutron-operator-controller-manager-5df6744645-slsxs\" (UID: \"6ae578bf-06e2-4fcd-b272-84216e832cb2\") " pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.046756 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqgfs\" (UniqueName: \"kubernetes.io/projected/33d5bd3b-0223-4a9b-93a7-85004ae5f40a-kube-api-access-xqgfs\") pod \"mariadb-operator-controller-manager-7d4bbc7f54-rgdz4\" (UID: \"33d5bd3b-0223-4a9b-93a7-85004ae5f40a\") " pod="openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.076546 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.090884 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqfnn\" (UniqueName: \"kubernetes.io/projected/dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944-kube-api-access-kqfnn\") pod \"ovn-operator-controller-manager-9f78645d5-6p5gr\" (UID: \"dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944\") " pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.090974 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnzxk\" (UniqueName: \"kubernetes.io/projected/62a5da69-9938-40e9-944a-889b6a7fcc04-kube-api-access-xnzxk\") pod \"placement-operator-controller-manager-58f798889d-4xw87\" (UID: \"62a5da69-9938-40e9-944a-889b6a7fcc04\") " pod="openstack-operators/placement-operator-controller-manager-58f798889d-4xw87" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.091027 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shr8l\" (UniqueName: \"kubernetes.io/projected/39ddd5bc-de82-4e82-9744-b4d5a64e052f-kube-api-access-shr8l\") pod \"octavia-operator-controller-manager-857f9d6b88-ggz4c\" (UID: \"39ddd5bc-de82-4e82-9744-b4d5a64e052f\") " pod="openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.091061 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b8c5692-2750-401d-bee2-4717f71fc6df-cert\") pod \"openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn\" (UID: \"2b8c5692-2750-401d-bee2-4717f71fc6df\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.091133 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-252v9\" (UniqueName: \"kubernetes.io/projected/2b8c5692-2750-401d-bee2-4717f71fc6df-kube-api-access-252v9\") pod \"openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn\" (UID: \"2b8c5692-2750-401d-bee2-4717f71fc6df\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.091174 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhnk9\" (UniqueName: \"kubernetes.io/projected/0243dd73-76b2-4168-8106-14676305be39-kube-api-access-jhnk9\") pod \"swift-operator-controller-manager-7779c57cf7-s7shc\" (UID: \"0243dd73-76b2-4168-8106-14676305be39\") " pod="openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.091227 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jstg4\" (UniqueName: \"kubernetes.io/projected/06051762-e014-4c42-9e66-e34da6b618c8-kube-api-access-jstg4\") pod \"nova-operator-controller-manager-664db87fd8-hfzdg\" (UID: \"06051762-e014-4c42-9e66-e34da6b618c8\") " pod="openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.102460 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.130572 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w"] Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.132059 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.138564 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-55t9x" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.141632 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jstg4\" (UniqueName: \"kubernetes.io/projected/06051762-e014-4c42-9e66-e34da6b618c8-kube-api-access-jstg4\") pod \"nova-operator-controller-manager-664db87fd8-hfzdg\" (UID: \"06051762-e014-4c42-9e66-e34da6b618c8\") " pod="openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.146889 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shr8l\" (UniqueName: \"kubernetes.io/projected/39ddd5bc-de82-4e82-9744-b4d5a64e052f-kube-api-access-shr8l\") pod \"octavia-operator-controller-manager-857f9d6b88-ggz4c\" (UID: \"39ddd5bc-de82-4e82-9744-b4d5a64e052f\") " pod="openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.157169 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w"] Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.157813 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.184350 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6"] Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.185782 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.192420 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-s97sj" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.192590 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6"] Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.193580 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b8c5692-2750-401d-bee2-4717f71fc6df-cert\") pod \"openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn\" (UID: \"2b8c5692-2750-401d-bee2-4717f71fc6df\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.193611 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmj4c\" (UniqueName: \"kubernetes.io/projected/5bddb4bd-da2f-405e-a72e-3b89c3526010-kube-api-access-dmj4c\") pod \"test-operator-controller-manager-6db7bffb67-stlv6\" (UID: \"5bddb4bd-da2f-405e-a72e-3b89c3526010\") " pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.193641 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-252v9\" (UniqueName: \"kubernetes.io/projected/2b8c5692-2750-401d-bee2-4717f71fc6df-kube-api-access-252v9\") pod \"openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn\" (UID: \"2b8c5692-2750-401d-bee2-4717f71fc6df\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.193671 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhnk9\" (UniqueName: \"kubernetes.io/projected/0243dd73-76b2-4168-8106-14676305be39-kube-api-access-jhnk9\") pod \"swift-operator-controller-manager-7779c57cf7-s7shc\" (UID: \"0243dd73-76b2-4168-8106-14676305be39\") " pod="openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.193708 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjbmq\" (UniqueName: \"kubernetes.io/projected/4ed91cbd-66c2-4144-8770-c1495382976a-kube-api-access-vjbmq\") pod \"telemetry-operator-controller-manager-884d667-t6d6w\" (UID: \"4ed91cbd-66c2-4144-8770-c1495382976a\") " pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.193735 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqfnn\" (UniqueName: \"kubernetes.io/projected/dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944-kube-api-access-kqfnn\") pod \"ovn-operator-controller-manager-9f78645d5-6p5gr\" (UID: \"dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944\") " pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.193764 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnzxk\" (UniqueName: 
\"kubernetes.io/projected/62a5da69-9938-40e9-944a-889b6a7fcc04-kube-api-access-xnzxk\") pod \"placement-operator-controller-manager-58f798889d-4xw87\" (UID: \"62a5da69-9938-40e9-944a-889b6a7fcc04\") " pod="openstack-operators/placement-operator-controller-manager-58f798889d-4xw87" Jun 06 09:27:53 crc kubenswrapper[4911]: E0606 09:27:53.194013 4911 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jun 06 09:27:53 crc kubenswrapper[4911]: E0606 09:27:53.194048 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b8c5692-2750-401d-bee2-4717f71fc6df-cert podName:2b8c5692-2750-401d-bee2-4717f71fc6df nodeName:}" failed. No retries permitted until 2025-06-06 09:27:53.694035436 +0000 UTC m=+884.969460979 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2b8c5692-2750-401d-bee2-4717f71fc6df-cert") pod "openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" (UID: "2b8c5692-2750-401d-bee2-4717f71fc6df") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.205579 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2"] Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.206673 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.212331 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.214407 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-j69kq" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.232789 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqfnn\" (UniqueName: \"kubernetes.io/projected/dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944-kube-api-access-kqfnn\") pod \"ovn-operator-controller-manager-9f78645d5-6p5gr\" (UID: \"dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944\") " pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.235697 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnzxk\" (UniqueName: \"kubernetes.io/projected/62a5da69-9938-40e9-944a-889b6a7fcc04-kube-api-access-xnzxk\") pod \"placement-operator-controller-manager-58f798889d-4xw87\" (UID: \"62a5da69-9938-40e9-944a-889b6a7fcc04\") " pod="openstack-operators/placement-operator-controller-manager-58f798889d-4xw87" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.240085 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhnk9\" (UniqueName: \"kubernetes.io/projected/0243dd73-76b2-4168-8106-14676305be39-kube-api-access-jhnk9\") pod \"swift-operator-controller-manager-7779c57cf7-s7shc\" (UID: \"0243dd73-76b2-4168-8106-14676305be39\") " pod="openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.246352 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-252v9\" (UniqueName: 
\"kubernetes.io/projected/2b8c5692-2750-401d-bee2-4717f71fc6df-kube-api-access-252v9\") pod \"openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn\" (UID: \"2b8c5692-2750-401d-bee2-4717f71fc6df\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.266644 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.270935 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2"] Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.299049 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-58f798889d-4xw87" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.304492 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjbmq\" (UniqueName: \"kubernetes.io/projected/4ed91cbd-66c2-4144-8770-c1495382976a-kube-api-access-vjbmq\") pod \"telemetry-operator-controller-manager-884d667-t6d6w\" (UID: \"4ed91cbd-66c2-4144-8770-c1495382976a\") " pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.304542 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbv7k\" (UniqueName: \"kubernetes.io/projected/24fd9a67-5d42-4633-bc61-19f5975b18ff-kube-api-access-sbv7k\") pod \"openstack-operator-controller-manager-764c8d9cbc-rtgz2\" (UID: \"24fd9a67-5d42-4633-bc61-19f5975b18ff\") " pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.304580 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/24fd9a67-5d42-4633-bc61-19f5975b18ff-cert\") pod \"openstack-operator-controller-manager-764c8d9cbc-rtgz2\" (UID: \"24fd9a67-5d42-4633-bc61-19f5975b18ff\") " pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.304637 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmj4c\" (UniqueName: \"kubernetes.io/projected/5bddb4bd-da2f-405e-a72e-3b89c3526010-kube-api-access-dmj4c\") pod \"test-operator-controller-manager-6db7bffb67-stlv6\" (UID: \"5bddb4bd-da2f-405e-a72e-3b89c3526010\") " pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.315654 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws"] Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.318347 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.328883 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws"] Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.329460 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.333310 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-ngsxg" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.338862 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmj4c\" (UniqueName: \"kubernetes.io/projected/5bddb4bd-da2f-405e-a72e-3b89c3526010-kube-api-access-dmj4c\") pod \"test-operator-controller-manager-6db7bffb67-stlv6\" (UID: \"5bddb4bd-da2f-405e-a72e-3b89c3526010\") " pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.339803 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.361309 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjbmq\" (UniqueName: \"kubernetes.io/projected/4ed91cbd-66c2-4144-8770-c1495382976a-kube-api-access-vjbmq\") pod \"telemetry-operator-controller-manager-884d667-t6d6w\" (UID: \"4ed91cbd-66c2-4144-8770-c1495382976a\") " pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.382022 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps"] Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.405434 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/229f54b7-db5c-48f8-9188-8ba38df574b5-cert\") pod \"infra-operator-controller-manager-5b4ccb8c4-t77qw\" (UID: \"229f54b7-db5c-48f8-9188-8ba38df574b5\") " pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.405493 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbv7k\" (UniqueName: \"kubernetes.io/projected/24fd9a67-5d42-4633-bc61-19f5975b18ff-kube-api-access-sbv7k\") pod \"openstack-operator-controller-manager-764c8d9cbc-rtgz2\" (UID: \"24fd9a67-5d42-4633-bc61-19f5975b18ff\") " pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.405523 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/24fd9a67-5d42-4633-bc61-19f5975b18ff-cert\") pod \"openstack-operator-controller-manager-764c8d9cbc-rtgz2\" (UID: \"24fd9a67-5d42-4633-bc61-19f5975b18ff\") " pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.405570 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxkwn\" (UniqueName: \"kubernetes.io/projected/08567c0a-fcf6-4ed2-8477-bccd389f2a6d-kube-api-access-dxkwn\") pod \"rabbitmq-cluster-operator-manager-67ff8584d-djvws\" (UID: \"08567c0a-fcf6-4ed2-8477-bccd389f2a6d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws" Jun 06 09:27:53 crc kubenswrapper[4911]: E0606 09:27:53.405677 4911 secret.go:188] Couldn't get secret 
openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jun 06 09:27:53 crc kubenswrapper[4911]: E0606 09:27:53.405762 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/229f54b7-db5c-48f8-9188-8ba38df574b5-cert podName:229f54b7-db5c-48f8-9188-8ba38df574b5 nodeName:}" failed. No retries permitted until 2025-06-06 09:27:54.405737254 +0000 UTC m=+885.681162817 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/229f54b7-db5c-48f8-9188-8ba38df574b5-cert") pod "infra-operator-controller-manager-5b4ccb8c4-t77qw" (UID: "229f54b7-db5c-48f8-9188-8ba38df574b5") : secret "infra-operator-webhook-server-cert" not found Jun 06 09:27:53 crc kubenswrapper[4911]: E0606 09:27:53.405858 4911 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jun 06 09:27:53 crc kubenswrapper[4911]: E0606 09:27:53.405966 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/24fd9a67-5d42-4633-bc61-19f5975b18ff-cert podName:24fd9a67-5d42-4633-bc61-19f5975b18ff nodeName:}" failed. No retries permitted until 2025-06-06 09:27:53.905944849 +0000 UTC m=+885.181370392 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/24fd9a67-5d42-4633-bc61-19f5975b18ff-cert") pod "openstack-operator-controller-manager-764c8d9cbc-rtgz2" (UID: "24fd9a67-5d42-4633-bc61-19f5975b18ff") : secret "webhook-server-cert" not found Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.425288 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbv7k\" (UniqueName: \"kubernetes.io/projected/24fd9a67-5d42-4633-bc61-19f5975b18ff-kube-api-access-sbv7k\") pod \"openstack-operator-controller-manager-764c8d9cbc-rtgz2\" (UID: \"24fd9a67-5d42-4633-bc61-19f5975b18ff\") " pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.496223 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.506734 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxkwn\" (UniqueName: \"kubernetes.io/projected/08567c0a-fcf6-4ed2-8477-bccd389f2a6d-kube-api-access-dxkwn\") pod \"rabbitmq-cluster-operator-manager-67ff8584d-djvws\" (UID: \"08567c0a-fcf6-4ed2-8477-bccd389f2a6d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.513548 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.531891 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxkwn\" (UniqueName: \"kubernetes.io/projected/08567c0a-fcf6-4ed2-8477-bccd389f2a6d-kube-api-access-dxkwn\") pod \"rabbitmq-cluster-operator-manager-67ff8584d-djvws\" (UID: \"08567c0a-fcf6-4ed2-8477-bccd389f2a6d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.531959 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.569935 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.609146 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps" event={"ID":"c5040ae4-3ac1-4bf6-a982-4dd494402e9f","Type":"ContainerStarted","Data":"7e5314682e3fb4d35dfc16fe232f0d5f798a4c5ba64e1e070fbd85b43e6ef0c3"} Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.644448 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b554678df-g2vch"] Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.712645 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b8c5692-2750-401d-bee2-4717f71fc6df-cert\") pod \"openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn\" (UID: \"2b8c5692-2750-401d-bee2-4717f71fc6df\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.719972 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2b8c5692-2750-401d-bee2-4717f71fc6df-cert\") pod \"openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn\" (UID: \"2b8c5692-2750-401d-bee2-4717f71fc6df\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.916074 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/24fd9a67-5d42-4633-bc61-19f5975b18ff-cert\") pod \"openstack-operator-controller-manager-764c8d9cbc-rtgz2\" (UID: \"24fd9a67-5d42-4633-bc61-19f5975b18ff\") " pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:27:53 crc kubenswrapper[4911]: E0606 09:27:53.916418 4911 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jun 06 09:27:53 crc kubenswrapper[4911]: E0606 09:27:53.916487 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/24fd9a67-5d42-4633-bc61-19f5975b18ff-cert podName:24fd9a67-5d42-4633-bc61-19f5975b18ff nodeName:}" failed. No retries permitted until 2025-06-06 09:27:54.916467595 +0000 UTC m=+886.191893138 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/24fd9a67-5d42-4633-bc61-19f5975b18ff-cert") pod "openstack-operator-controller-manager-764c8d9cbc-rtgz2" (UID: "24fd9a67-5d42-4633-bc61-19f5975b18ff") : secret "webhook-server-cert" not found Jun 06 09:27:53 crc kubenswrapper[4911]: I0606 09:27:53.979284 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.102233 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-97b97479c-j5r77"] Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.142932 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq"] Jun 06 09:27:54 crc kubenswrapper[4911]: W0606 09:27:54.151314 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6cd86f9_52b4_430f_b1d6_105fe436aff6.slice/crio-c466c76c058747f8b44058ed35f625f102b7227ada5c31c54e6e890c501bafd9 WatchSource:0}: Error finding container c466c76c058747f8b44058ed35f625f102b7227ada5c31c54e6e890c501bafd9: Status 404 returned error can't find the container with id c466c76c058747f8b44058ed35f625f102b7227ada5c31c54e6e890c501bafd9 Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.290922 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg"] Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.300515 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.300575 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.315506 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-58f798889d-4xw87"] Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.325066 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm"] Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.334107 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw"] Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.339361 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps"] Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.347200 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c"] Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.352856 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6"] Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.358086 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9"] Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.435716 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/229f54b7-db5c-48f8-9188-8ba38df574b5-cert\") pod \"infra-operator-controller-manager-5b4ccb8c4-t77qw\" (UID: \"229f54b7-db5c-48f8-9188-8ba38df574b5\") " pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.449296 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/229f54b7-db5c-48f8-9188-8ba38df574b5-cert\") pod \"infra-operator-controller-manager-5b4ccb8c4-t77qw\" (UID: \"229f54b7-db5c-48f8-9188-8ba38df574b5\") " pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.480870 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4"] Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.493200 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr"] Jun 06 09:27:54 crc kubenswrapper[4911]: W0606 09:27:54.493929 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33d5bd3b_0223_4a9b_93a7_85004ae5f40a.slice/crio-d7dcfe660a3b089903c7df8818f065787d15a3a375295a3c44cbc5ad76a87aa8 WatchSource:0}: Error finding container d7dcfe660a3b089903c7df8818f065787d15a3a375295a3c44cbc5ad76a87aa8: Status 404 returned error can't find the container with id d7dcfe660a3b089903c7df8818f065787d15a3a375295a3c44cbc5ad76a87aa8 Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.498826 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs"] Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.505303 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:603cfc7eca2feb4bca4fef6f545104386a65e911559589f1395239afbc101daf,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{268435456 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kqfnn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-9f78645d5-6p5gr_openstack-operators(dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.506711 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:da08b3c2e379399cdab66cf8c5248ed8bf9783745cf4d68a6c9efb0c30838a0a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{268435456 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pxc7r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5df6744645-slsxs_openstack-operators(6ae578bf-06e2-4fcd-b272-84216e832cb2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.521755 4911 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w"] Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.528402 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc"] Jun 06 09:27:54 crc kubenswrapper[4911]: W0606 09:27:54.542641 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0243dd73_76b2_4168_8106_14676305be39.slice/crio-ffc8ba9f4a33039c0bf58448847e0652411291322baf31e061d95dbd68f668ce WatchSource:0}: Error finding container ffc8ba9f4a33039c0bf58448847e0652411291322baf31e061d95dbd68f668ce: Status 404 returned error can't find the container with id ffc8ba9f4a33039c0bf58448847e0652411291322baf31e061d95dbd68f668ce Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.546969 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws"] Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.554787 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:47d1eacd07738b8dc59814467f756e12092d57c051b119be499f425ec738d607,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{268435456 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vjbmq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-884d667-t6d6w_openstack-operators(4ed91cbd-66c2-4144-8770-c1495382976a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jun 06 09:27:54 
crc kubenswrapper[4911]: I0606 09:27:54.555553 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6"] Jun 06 09:27:54 crc kubenswrapper[4911]: W0606 09:27:54.557373 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08567c0a_fcf6_4ed2_8477_bccd389f2a6d.slice/crio-0bb1849fa5c115a388a22df81d14c83cbf1b58997cbd7621d0144b8c7f8ce4f6 WatchSource:0}: Error finding container 0bb1849fa5c115a388a22df81d14c83cbf1b58997cbd7621d0144b8c7f8ce4f6: Status 404 returned error can't find the container with id 0bb1849fa5c115a388a22df81d14c83cbf1b58997cbd7621d0144b8c7f8ce4f6 Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.562630 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn"] Jun 06 09:27:54 crc kubenswrapper[4911]: W0606 09:27:54.564124 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5bddb4bd_da2f_405e_a72e_3b89c3526010.slice/crio-6ec99ece172e81659c8f10d33455fa78bfd4f23f27b7f8ff323b9cadc63525b6 WatchSource:0}: Error finding container 6ec99ece172e81659c8f10d33455fa78bfd4f23f27b7f8ff323b9cadc63525b6: Status 404 returned error can't find the container with id 6ec99ece172e81659c8f10d33455fa78bfd4f23f27b7f8ff323b9cadc63525b6 Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.564257 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dxkwn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-67ff8584d-djvws_openstack-operators(08567c0a-fcf6-4ed2-8477-bccd389f2a6d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jun 06 09:27:54 crc 
kubenswrapper[4911]: E0606 09:27:54.565699 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws" podUID="08567c0a-fcf6-4ed2-8477-bccd389f2a6d" Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.566906 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:5d237421ae87d4a765a6ba8e4ab6e82e2fc082f2bf900174be343710e043ba2a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{268435456 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dmj4c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-6db7bffb67-stlv6_openstack-operators(5bddb4bd-da2f-405e-a72e-3b89c3526010): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.577503 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:3f2c7ab5d9e6cf3ff9cd3055751cb1189c5421bdca65f4652a804be08d99e717,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m 
DecimalSI},memory: {{268435456 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-252v9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn_openstack-operators(2b8c5692-2750-401d-bee2-4717f71fc6df): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.628785 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws" event={"ID":"08567c0a-fcf6-4ed2-8477-bccd389f2a6d","Type":"ContainerStarted","Data":"0bb1849fa5c115a388a22df81d14c83cbf1b58997cbd7621d0144b8c7f8ce4f6"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.633576 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-58f798889d-4xw87" event={"ID":"62a5da69-9938-40e9-944a-889b6a7fcc04","Type":"ContainerStarted","Data":"e9f79cf69fb2fc8dcb62959c2e1781659d61c1508a62170905669eda26a6405b"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.640329 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9" event={"ID":"3431e651-edaf-4b0b-b6a1-f56fbae01047","Type":"ContainerStarted","Data":"6767ce29da748ec403bd845bfd708eb19a43caa9c5fa1f00b5580fa70a36b3c9"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.642970 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw" event={"ID":"02910366-cde2-4ad1-a276-cb4fe7c3e4c1","Type":"ContainerStarted","Data":"5495edb7fc846541f905da35c4a254e406aa8453e6282f7617a6adede66ac55f"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.650681 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps" 
event={"ID":"38945f0e-4d6b-44ba-b644-cdb391508f47","Type":"ContainerStarted","Data":"49a1088c0a6b0e23675e02c6a772e5d0e12455c3a2995c223072f03d49181070"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.653840 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc" event={"ID":"0243dd73-76b2-4168-8106-14676305be39","Type":"ContainerStarted","Data":"ffc8ba9f4a33039c0bf58448847e0652411291322baf31e061d95dbd68f668ce"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.655517 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" event={"ID":"4ed91cbd-66c2-4144-8770-c1495382976a","Type":"ContainerStarted","Data":"bb1386041782215e833cf299ca59ddf03fe359e6ae6d5a1bcee8938d291d8c71"} Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.661683 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws" podUID="08567c0a-fcf6-4ed2-8477-bccd389f2a6d" Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.667485 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-97b97479c-j5r77" event={"ID":"0631c55e-f521-4524-9881-14a20a5b280d","Type":"ContainerStarted","Data":"16a5b5cd2b9f6f3bb73d5aeee66b28017d2346e06519c9471e9761202f21623d"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.669037 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" event={"ID":"6ae578bf-06e2-4fcd-b272-84216e832cb2","Type":"ContainerStarted","Data":"691c711ebb37e097cf3414ec2ea3e123ac44916dc241e0f3daaa84d535a2dd90"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.669887 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" event={"ID":"5bddb4bd-da2f-405e-a72e-3b89c3526010","Type":"ContainerStarted","Data":"6ec99ece172e81659c8f10d33455fa78bfd4f23f27b7f8ff323b9cadc63525b6"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.670810 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm" event={"ID":"8d372995-214b-497b-807e-56a813866d07","Type":"ContainerStarted","Data":"2daee85b0d16a69f53ab73994c79981aac478e9a2ffc97bf5982f8d9192c035e"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.671653 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b554678df-g2vch" event={"ID":"2ac3f341-0925-45e2-a1fe-f356cde13a14","Type":"ContainerStarted","Data":"cff79df916ad720a70093c54363c07fe3f682ad3fe2d124daaf24fd6727ca764"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.672355 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4" event={"ID":"33d5bd3b-0223-4a9b-93a7-85004ae5f40a","Type":"ContainerStarted","Data":"d7dcfe660a3b089903c7df8818f065787d15a3a375295a3c44cbc5ad76a87aa8"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.673519 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" event={"ID":"dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944","Type":"ContainerStarted","Data":"3b257f5fd558b521329d4655afa265374c8d043d995cf3ee84099e7c59ab21ca"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.674260 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" event={"ID":"2b8c5692-2750-401d-bee2-4717f71fc6df","Type":"ContainerStarted","Data":"a319100f48c79c0c8cd357c9ad56ef988828ec0404c54101221a8717cfb0a826"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.675280 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6" event={"ID":"639b9190-f625-453c-8ab5-d48b2140f801","Type":"ContainerStarted","Data":"b717af2452c2e6020a52fd82d91069ff01501ebec79aa0095861324b6a54b4dd"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.676042 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg" event={"ID":"06051762-e014-4c42-9e66-e34da6b618c8","Type":"ContainerStarted","Data":"9c3991b0b114d423635fd7b566e4684fca0741c3d6aa84b08c913d28e6c69803"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.677029 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c" event={"ID":"39ddd5bc-de82-4e82-9744-b4d5a64e052f","Type":"ContainerStarted","Data":"6ecbf11a01aaf3c785f9255831534030ce230052557b7345893ace2e9b149e3d"} Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.677972 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq" event={"ID":"b6cd86f9-52b4-430f-b1d6-105fe436aff6","Type":"ContainerStarted","Data":"c466c76c058747f8b44058ed35f625f102b7227ada5c31c54e6e890c501bafd9"} Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.711867 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" podUID="dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944" Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.726579 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" podUID="6ae578bf-06e2-4fcd-b272-84216e832cb2" Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.736670 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.760364 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" podUID="4ed91cbd-66c2-4144-8770-c1495382976a" Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.840660 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" podUID="5bddb4bd-da2f-405e-a72e-3b89c3526010" Jun 06 09:27:54 crc kubenswrapper[4911]: E0606 09:27:54.862365 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" podUID="2b8c5692-2750-401d-bee2-4717f71fc6df" Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.954783 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/24fd9a67-5d42-4633-bc61-19f5975b18ff-cert\") pod \"openstack-operator-controller-manager-764c8d9cbc-rtgz2\" (UID: \"24fd9a67-5d42-4633-bc61-19f5975b18ff\") " pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:27:54 crc kubenswrapper[4911]: I0606 09:27:54.962495 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/24fd9a67-5d42-4633-bc61-19f5975b18ff-cert\") pod \"openstack-operator-controller-manager-764c8d9cbc-rtgz2\" (UID: \"24fd9a67-5d42-4633-bc61-19f5975b18ff\") " pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:27:55 crc kubenswrapper[4911]: I0606 09:27:55.055496 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw"] Jun 06 09:27:55 crc kubenswrapper[4911]: I0606 09:27:55.057518 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:27:55 crc kubenswrapper[4911]: I0606 09:27:55.367957 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2"] Jun 06 09:27:55 crc kubenswrapper[4911]: W0606 09:27:55.390816 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24fd9a67_5d42_4633_bc61_19f5975b18ff.slice/crio-2cbafcee5914fefef9660049da8d09ea6bc6fcfa4a3ee0c21ac5907533e90abc WatchSource:0}: Error finding container 2cbafcee5914fefef9660049da8d09ea6bc6fcfa4a3ee0c21ac5907533e90abc: Status 404 returned error can't find the container with id 2cbafcee5914fefef9660049da8d09ea6bc6fcfa4a3ee0c21ac5907533e90abc Jun 06 09:27:55 crc kubenswrapper[4911]: I0606 09:27:55.748741 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" event={"ID":"24fd9a67-5d42-4633-bc61-19f5975b18ff","Type":"ContainerStarted","Data":"090390e3b55ffc0ad5e94c2ef6b00682020595fa366862e49365ad0fb7ee1d52"} Jun 06 09:27:55 crc kubenswrapper[4911]: I0606 09:27:55.749050 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" event={"ID":"24fd9a67-5d42-4633-bc61-19f5975b18ff","Type":"ContainerStarted","Data":"2cbafcee5914fefef9660049da8d09ea6bc6fcfa4a3ee0c21ac5907533e90abc"} Jun 06 09:27:55 crc kubenswrapper[4911]: I0606 09:27:55.773596 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" event={"ID":"229f54b7-db5c-48f8-9188-8ba38df574b5","Type":"ContainerStarted","Data":"8b6ce7f2d9a9dfad43d00a222b19754853c137d06385f6fb0b0914eb0b0dd5cd"} Jun 06 09:27:55 crc kubenswrapper[4911]: I0606 09:27:55.792736 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" event={"ID":"4ed91cbd-66c2-4144-8770-c1495382976a","Type":"ContainerStarted","Data":"dea351d33540dce11b1178e315845d4d5e430266a5296b677ddc528684604813"} Jun 06 09:27:55 crc kubenswrapper[4911]: E0606 09:27:55.811201 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:47d1eacd07738b8dc59814467f756e12092d57c051b119be499f425ec738d607\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" podUID="4ed91cbd-66c2-4144-8770-c1495382976a" Jun 06 09:27:55 crc kubenswrapper[4911]: I0606 09:27:55.832289 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" event={"ID":"6ae578bf-06e2-4fcd-b272-84216e832cb2","Type":"ContainerStarted","Data":"5c4b909fb15547987cc320f333396df093ae1f3e31e28b2d18f96d9abc8df92c"} Jun 06 09:27:55 crc kubenswrapper[4911]: E0606 09:27:55.846481 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:da08b3c2e379399cdab66cf8c5248ed8bf9783745cf4d68a6c9efb0c30838a0a\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" podUID="6ae578bf-06e2-4fcd-b272-84216e832cb2" Jun 06 09:27:55 
crc kubenswrapper[4911]: I0606 09:27:55.852757 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" event={"ID":"dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944","Type":"ContainerStarted","Data":"a873b768e312047865c6ac99622a134e449266be8178c13cf258a1994675b147"} Jun 06 09:27:55 crc kubenswrapper[4911]: E0606 09:27:55.858729 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:603cfc7eca2feb4bca4fef6f545104386a65e911559589f1395239afbc101daf\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" podUID="dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944" Jun 06 09:27:55 crc kubenswrapper[4911]: I0606 09:27:55.860111 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" event={"ID":"5bddb4bd-da2f-405e-a72e-3b89c3526010","Type":"ContainerStarted","Data":"0e9580469b1fcac4ce2400544fdd7447224da96d43426b11885c75e0a9fad872"} Jun 06 09:27:55 crc kubenswrapper[4911]: E0606 09:27:55.861475 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:5d237421ae87d4a765a6ba8e4ab6e82e2fc082f2bf900174be343710e043ba2a\\\"\"" pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" podUID="5bddb4bd-da2f-405e-a72e-3b89c3526010" Jun 06 09:27:55 crc kubenswrapper[4911]: I0606 09:27:55.862411 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" event={"ID":"2b8c5692-2750-401d-bee2-4717f71fc6df","Type":"ContainerStarted","Data":"de2e3ce99f7a2e09ac5f4a1c7f366d2a6cc81a81fcb61e9385bcd7e9d6041f99"} Jun 06 09:27:55 crc kubenswrapper[4911]: E0606 09:27:55.863354 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws" podUID="08567c0a-fcf6-4ed2-8477-bccd389f2a6d" Jun 06 09:27:55 crc kubenswrapper[4911]: E0606 09:27:55.863707 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:3f2c7ab5d9e6cf3ff9cd3055751cb1189c5421bdca65f4652a804be08d99e717\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" podUID="2b8c5692-2750-401d-bee2-4717f71fc6df" Jun 06 09:27:56 crc kubenswrapper[4911]: I0606 09:27:56.871961 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" event={"ID":"24fd9a67-5d42-4633-bc61-19f5975b18ff","Type":"ContainerStarted","Data":"c102a36b71cbfb64713691425b17b182291415e03cbba1136886eb24319594a7"} Jun 06 09:27:56 crc kubenswrapper[4911]: I0606 09:27:56.872007 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:27:56 crc kubenswrapper[4911]: 
E0606 09:27:56.874720 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:da08b3c2e379399cdab66cf8c5248ed8bf9783745cf4d68a6c9efb0c30838a0a\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" podUID="6ae578bf-06e2-4fcd-b272-84216e832cb2" Jun 06 09:27:56 crc kubenswrapper[4911]: E0606 09:27:56.874790 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:5d237421ae87d4a765a6ba8e4ab6e82e2fc082f2bf900174be343710e043ba2a\\\"\"" pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" podUID="5bddb4bd-da2f-405e-a72e-3b89c3526010" Jun 06 09:27:56 crc kubenswrapper[4911]: E0606 09:27:56.876020 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:3f2c7ab5d9e6cf3ff9cd3055751cb1189c5421bdca65f4652a804be08d99e717\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" podUID="2b8c5692-2750-401d-bee2-4717f71fc6df" Jun 06 09:27:56 crc kubenswrapper[4911]: E0606 09:27:56.878036 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:603cfc7eca2feb4bca4fef6f545104386a65e911559589f1395239afbc101daf\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" podUID="dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944" Jun 06 09:27:56 crc kubenswrapper[4911]: E0606 09:27:56.878075 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:47d1eacd07738b8dc59814467f756e12092d57c051b119be499f425ec738d607\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" podUID="4ed91cbd-66c2-4144-8770-c1495382976a" Jun 06 09:27:56 crc kubenswrapper[4911]: I0606 09:27:56.969856 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" podStartSLOduration=3.9698317149999998 podStartE2EDuration="3.969831715s" podCreationTimestamp="2025-06-06 09:27:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:27:56.966934891 +0000 UTC m=+888.242360434" watchObservedRunningTime="2025-06-06 09:27:56.969831715 +0000 UTC m=+888.245257258" Jun 06 09:28:01 crc kubenswrapper[4911]: I0606 09:28:01.511296 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/crc-debug-k6slv"] Jun 06 09:28:01 crc kubenswrapper[4911]: I0606 09:28:01.512574 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/crc-debug-k6slv" Jun 06 09:28:01 crc kubenswrapper[4911]: I0606 09:28:01.515231 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-4rkqw" Jun 06 09:28:01 crc kubenswrapper[4911]: I0606 09:28:01.668858 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec8129ee-9f25-4768-9cfd-3319551392fd-host\") pod \"crc-debug-k6slv\" (UID: \"ec8129ee-9f25-4768-9cfd-3319551392fd\") " pod="openstack-operators/crc-debug-k6slv" Jun 06 09:28:01 crc kubenswrapper[4911]: I0606 09:28:01.669038 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrfqn\" (UniqueName: \"kubernetes.io/projected/ec8129ee-9f25-4768-9cfd-3319551392fd-kube-api-access-nrfqn\") pod \"crc-debug-k6slv\" (UID: \"ec8129ee-9f25-4768-9cfd-3319551392fd\") " pod="openstack-operators/crc-debug-k6slv" Jun 06 09:28:01 crc kubenswrapper[4911]: I0606 09:28:01.770608 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrfqn\" (UniqueName: \"kubernetes.io/projected/ec8129ee-9f25-4768-9cfd-3319551392fd-kube-api-access-nrfqn\") pod \"crc-debug-k6slv\" (UID: \"ec8129ee-9f25-4768-9cfd-3319551392fd\") " pod="openstack-operators/crc-debug-k6slv" Jun 06 09:28:01 crc kubenswrapper[4911]: I0606 09:28:01.770680 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec8129ee-9f25-4768-9cfd-3319551392fd-host\") pod \"crc-debug-k6slv\" (UID: \"ec8129ee-9f25-4768-9cfd-3319551392fd\") " pod="openstack-operators/crc-debug-k6slv" Jun 06 09:28:01 crc kubenswrapper[4911]: I0606 09:28:01.770778 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec8129ee-9f25-4768-9cfd-3319551392fd-host\") pod \"crc-debug-k6slv\" (UID: \"ec8129ee-9f25-4768-9cfd-3319551392fd\") " pod="openstack-operators/crc-debug-k6slv" Jun 06 09:28:01 crc kubenswrapper[4911]: I0606 09:28:01.792415 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrfqn\" (UniqueName: \"kubernetes.io/projected/ec8129ee-9f25-4768-9cfd-3319551392fd-kube-api-access-nrfqn\") pod \"crc-debug-k6slv\" (UID: \"ec8129ee-9f25-4768-9cfd-3319551392fd\") " pod="openstack-operators/crc-debug-k6slv" Jun 06 09:28:01 crc kubenswrapper[4911]: I0606 09:28:01.831819 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/crc-debug-k6slv" Jun 06 09:28:02 crc kubenswrapper[4911]: I0606 09:28:02.914477 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps" event={"ID":"c5040ae4-3ac1-4bf6-a982-4dd494402e9f","Type":"ContainerStarted","Data":"e63a65e7a59931d2d2009654cd0ea8fc2db3c1862e07b0b3b12fdd242ac5622a"} Jun 06 09:28:02 crc kubenswrapper[4911]: I0606 09:28:02.917118 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps" event={"ID":"38945f0e-4d6b-44ba-b644-cdb391508f47","Type":"ContainerStarted","Data":"e18039cfbb072a29e736aeed350e7e86ffebac5df6dec11fd95acbfae30dfc6f"} Jun 06 09:28:02 crc kubenswrapper[4911]: I0606 09:28:02.919276 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-97b97479c-j5r77" event={"ID":"0631c55e-f521-4524-9881-14a20a5b280d","Type":"ContainerStarted","Data":"d40279ef0a5d9a052a355aceb2ec49207b1805e8ce2a4fbfac2a811aa5f64340"} Jun 06 09:28:02 crc kubenswrapper[4911]: I0606 09:28:02.920424 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/crc-debug-k6slv" event={"ID":"ec8129ee-9f25-4768-9cfd-3319551392fd","Type":"ContainerStarted","Data":"0944cb744067ba2c89ded5a6012ba1530d34f43ee3ff3f05742e00f6e4267a93"} Jun 06 09:28:02 crc kubenswrapper[4911]: I0606 09:28:02.922293 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" event={"ID":"229f54b7-db5c-48f8-9188-8ba38df574b5","Type":"ContainerStarted","Data":"f460b7187a2198e148e5ed05f4a22c61c64e57c0ac771830d1bc71f3d680a98c"} Jun 06 09:28:03 crc kubenswrapper[4911]: I0606 09:28:03.968810 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc" event={"ID":"0243dd73-76b2-4168-8106-14676305be39","Type":"ContainerStarted","Data":"bcd6acb6cf820030c06e92b1d4b49f4297437779c48dc7e8a9cb293e8a83c3c1"} Jun 06 09:28:03 crc kubenswrapper[4911]: I0606 09:28:03.975155 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4" event={"ID":"33d5bd3b-0223-4a9b-93a7-85004ae5f40a","Type":"ContainerStarted","Data":"0b5780caf8e93a21cb9fa3bb310e4cf26c8416e940b07658ed1ce7b839b32777"} Jun 06 09:28:03 crc kubenswrapper[4911]: I0606 09:28:03.983972 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-97b97479c-j5r77" event={"ID":"0631c55e-f521-4524-9881-14a20a5b280d","Type":"ContainerStarted","Data":"5e1c7aa246e4cfe2556211b6ef74faa2e0953ad75f04ddf896197c0d32b82eca"} Jun 06 09:28:03 crc kubenswrapper[4911]: I0606 09:28:03.985201 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-97b97479c-j5r77" Jun 06 09:28:03 crc kubenswrapper[4911]: I0606 09:28:03.998446 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw" event={"ID":"02910366-cde2-4ad1-a276-cb4fe7c3e4c1","Type":"ContainerStarted","Data":"2b68b12b0494b7660cf88be903701184a1cd17e6cb31733795855a9df38b1394"} Jun 06 09:28:03 crc kubenswrapper[4911]: I0606 09:28:03.998496 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw" event={"ID":"02910366-cde2-4ad1-a276-cb4fe7c3e4c1","Type":"ContainerStarted","Data":"ba4716e664886182b3e9bee606571efdbd19e67e333da0c7f466c14fb015cf0b"} Jun 06 09:28:03 crc kubenswrapper[4911]: I0606 09:28:03.999176 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.019122 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-97b97479c-j5r77" podStartSLOduration=3.5713066639999997 podStartE2EDuration="12.019101888s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.12375938 +0000 UTC m=+885.399184923" lastFinishedPulling="2025-06-06 09:28:02.571554604 +0000 UTC m=+893.846980147" observedRunningTime="2025-06-06 09:28:04.014567962 +0000 UTC m=+895.289993525" watchObservedRunningTime="2025-06-06 09:28:04.019101888 +0000 UTC m=+895.294527431" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.020699 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps" event={"ID":"38945f0e-4d6b-44ba-b644-cdb391508f47","Type":"ContainerStarted","Data":"25b387c790228351887729ffc5e9bfa3b3009c58dc42cb30d4b760ec487deb96"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.021750 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.038577 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw" podStartSLOduration=3.8062948580000002 podStartE2EDuration="12.038560876s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.332970144 +0000 UTC m=+885.608395687" lastFinishedPulling="2025-06-06 09:28:02.565236162 +0000 UTC m=+893.840661705" observedRunningTime="2025-06-06 09:28:04.037318874 +0000 UTC m=+895.312744417" watchObservedRunningTime="2025-06-06 09:28:04.038560876 +0000 UTC m=+895.313986419" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.052792 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9" event={"ID":"3431e651-edaf-4b0b-b6a1-f56fbae01047","Type":"ContainerStarted","Data":"fffd2296385f305411931b3953982361863d20ad1a562e3c517b14def5cec1fc"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.052847 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9" event={"ID":"3431e651-edaf-4b0b-b6a1-f56fbae01047","Type":"ContainerStarted","Data":"97058f79a0568e7e4988452db7c9c28c0ef290a173a73c5eb92317ea415b59bb"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.053651 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.074321 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg" 
event={"ID":"06051762-e014-4c42-9e66-e34da6b618c8","Type":"ContainerStarted","Data":"87fc1b106827de1b5febe4b6f430f2783e06af720bacc7841c93cb72861ba46c"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.077780 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps" podStartSLOduration=3.8577445040000002 podStartE2EDuration="12.077765169s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.351540799 +0000 UTC m=+885.626966342" lastFinishedPulling="2025-06-06 09:28:02.571561454 +0000 UTC m=+893.846987007" observedRunningTime="2025-06-06 09:28:04.074726722 +0000 UTC m=+895.350152265" watchObservedRunningTime="2025-06-06 09:28:04.077765169 +0000 UTC m=+895.353190712" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.085762 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq" event={"ID":"b6cd86f9-52b4-430f-b1d6-105fe436aff6","Type":"ContainerStarted","Data":"b2b5026d051e4a20582a3060647e5136ef78ccddfa60bbb93719c0f1d8d0e644"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.091604 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm" event={"ID":"8d372995-214b-497b-807e-56a813866d07","Type":"ContainerStarted","Data":"874d5893ae8608d4b820a68f2bc4c3d70b7e4236f0c1a6a4acad81cbe277e76d"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.091650 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm" event={"ID":"8d372995-214b-497b-807e-56a813866d07","Type":"ContainerStarted","Data":"0bd8ac65623a7902b5e7a9a374da090ffd1508a8b467eed2dbd5400219a91367"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.091812 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.105342 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b554678df-g2vch" event={"ID":"2ac3f341-0925-45e2-a1fe-f356cde13a14","Type":"ContainerStarted","Data":"237474e0cdf20b7dded6486b15f2aa052848621da8396a669bcf6fac2278772e"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.106214 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9" podStartSLOduration=3.890016951 podStartE2EDuration="12.106189797s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.361380711 +0000 UTC m=+885.636806254" lastFinishedPulling="2025-06-06 09:28:02.577553557 +0000 UTC m=+893.852979100" observedRunningTime="2025-06-06 09:28:04.101381824 +0000 UTC m=+895.376807377" watchObservedRunningTime="2025-06-06 09:28:04.106189797 +0000 UTC m=+895.381615340" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.121722 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6" event={"ID":"639b9190-f625-453c-8ab5-d48b2140f801","Type":"ContainerStarted","Data":"29ba4dcdce5cd155e440e9554544e7eee08c2f51d5005283ca2a65e346369830"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.126130 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/crc-debug-k6slv" event={"ID":"ec8129ee-9f25-4768-9cfd-3319551392fd","Type":"ContainerStarted","Data":"c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.128292 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-58f798889d-4xw87" event={"ID":"62a5da69-9938-40e9-944a-889b6a7fcc04","Type":"ContainerStarted","Data":"9574b73fd72c51b3af9946c5cd34f932362675265a83a63a7c93d46d712e0eb1"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.151354 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c" event={"ID":"39ddd5bc-de82-4e82-9744-b4d5a64e052f","Type":"ContainerStarted","Data":"0ced87072a4c6dee6be576ea2e1d19fa735211086f9cce3e15fd7355425a57b3"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.151569 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.157426 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" event={"ID":"229f54b7-db5c-48f8-9188-8ba38df574b5","Type":"ContainerStarted","Data":"a48da63be16c9b14dd98592d5e4ff9bd2795eab34e2e816e714e882158b514bf"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.157671 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.166479 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps" event={"ID":"c5040ae4-3ac1-4bf6-a982-4dd494402e9f","Type":"ContainerStarted","Data":"88b1950ae5a1e5d59814f6fcd3f6f90e2ac06e76176b04ebd44288b61b066b25"} Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.166615 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.170074 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm" podStartSLOduration=3.927175121 podStartE2EDuration="12.170046741s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.330212523 +0000 UTC m=+885.605638066" lastFinishedPulling="2025-06-06 09:28:02.573084143 +0000 UTC m=+893.848509686" observedRunningTime="2025-06-06 09:28:04.142707482 +0000 UTC m=+895.418133025" watchObservedRunningTime="2025-06-06 09:28:04.170046741 +0000 UTC m=+895.445472284" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.170327 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/crc-debug-k6slv" podStartSLOduration=3.170320098 podStartE2EDuration="3.170320098s" podCreationTimestamp="2025-06-06 09:28:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:28:04.165794802 +0000 UTC m=+895.441220345" watchObservedRunningTime="2025-06-06 09:28:04.170320098 +0000 UTC m=+895.445745641" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.204811 4911 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" podStartSLOduration=4.694627392 podStartE2EDuration="12.20479138s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:55.061694453 +0000 UTC m=+886.337119996" lastFinishedPulling="2025-06-06 09:28:02.571858441 +0000 UTC m=+893.847283984" observedRunningTime="2025-06-06 09:28:04.201871066 +0000 UTC m=+895.477296609" watchObservedRunningTime="2025-06-06 09:28:04.20479138 +0000 UTC m=+895.480216923" Jun 06 09:28:04 crc kubenswrapper[4911]: I0606 09:28:04.260538 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c" podStartSLOduration=4.033101993 podStartE2EDuration="12.260513986s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.351404066 +0000 UTC m=+885.626829619" lastFinishedPulling="2025-06-06 09:28:02.578816079 +0000 UTC m=+893.854241612" observedRunningTime="2025-06-06 09:28:04.256647377 +0000 UTC m=+895.532072920" watchObservedRunningTime="2025-06-06 09:28:04.260513986 +0000 UTC m=+895.535939529" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.062614 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-764c8d9cbc-rtgz2" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.089556 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps" podStartSLOduration=4.026487713 podStartE2EDuration="13.089527602s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:53.509036888 +0000 UTC m=+884.784462421" lastFinishedPulling="2025-06-06 09:28:02.572076767 +0000 UTC m=+893.847502310" observedRunningTime="2025-06-06 09:28:04.290618677 +0000 UTC m=+895.566044220" watchObservedRunningTime="2025-06-06 09:28:05.089527602 +0000 UTC m=+896.364953155" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.174653 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b554678df-g2vch" event={"ID":"2ac3f341-0925-45e2-a1fe-f356cde13a14","Type":"ContainerStarted","Data":"0fe0f4daab68a14ae10bb8c84e172fd935c57e5a9767da55fdc1abc523fbeea6"} Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.174741 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-b554678df-g2vch" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.176545 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6" event={"ID":"639b9190-f625-453c-8ab5-d48b2140f801","Type":"ContainerStarted","Data":"a682cc4e6cdd314000f9acfe7b533d88b928337f6e2dd6a0113abc14b91f074f"} Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.176644 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.179230 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc" event={"ID":"0243dd73-76b2-4168-8106-14676305be39","Type":"ContainerStarted","Data":"59e6df0f1dae6b856b802ca33d6817d03a66b69c6f65241db60fa749ebc5358f"} Jun 06 09:28:05 
crc kubenswrapper[4911]: I0606 09:28:05.179430 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.181042 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4" event={"ID":"33d5bd3b-0223-4a9b-93a7-85004ae5f40a","Type":"ContainerStarted","Data":"17615f65e47828a6ee4e277418444173fc3efc1238e46b8eeb9246ef31ca2bd2"} Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.181131 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.183312 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg" event={"ID":"06051762-e014-4c42-9e66-e34da6b618c8","Type":"ContainerStarted","Data":"c2c84c3840ecf09aa88dcc01eb1399d77bd2c13c411b50c6945de83fe0c8bf24"} Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.183413 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.184903 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-58f798889d-4xw87" event={"ID":"62a5da69-9938-40e9-944a-889b6a7fcc04","Type":"ContainerStarted","Data":"56699c6550ea50d70a169318ea5a302e9174ae2877891204a65471b432a83b33"} Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.185050 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-58f798889d-4xw87" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.186609 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c" event={"ID":"39ddd5bc-de82-4e82-9744-b4d5a64e052f","Type":"ContainerStarted","Data":"dbf01955783b28b128af1f35c3af5769ebe735086830f5a37f6c47489336449c"} Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.188999 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq" event={"ID":"b6cd86f9-52b4-430f-b1d6-105fe436aff6","Type":"ContainerStarted","Data":"31f995d17643d240e6fcf41cac6cdadf878152690385d47a37dc3de34af47ce4"} Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.200457 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-b554678df-g2vch" podStartSLOduration=4.339831253 podStartE2EDuration="13.200433491s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:53.718671783 +0000 UTC m=+884.994097326" lastFinishedPulling="2025-06-06 09:28:02.579274021 +0000 UTC m=+893.854699564" observedRunningTime="2025-06-06 09:28:05.193065262 +0000 UTC m=+896.468490805" watchObservedRunningTime="2025-06-06 09:28:05.200433491 +0000 UTC m=+896.475859034" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.208718 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg" podStartSLOduration=4.969060095 podStartE2EDuration="13.208701962s" podCreationTimestamp="2025-06-06 09:27:52 +0000 
UTC" firstStartedPulling="2025-06-06 09:27:54.324564779 +0000 UTC m=+885.599990322" lastFinishedPulling="2025-06-06 09:28:02.564206646 +0000 UTC m=+893.839632189" observedRunningTime="2025-06-06 09:28:05.206616019 +0000 UTC m=+896.482041582" watchObservedRunningTime="2025-06-06 09:28:05.208701962 +0000 UTC m=+896.484127495" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.224619 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4" podStartSLOduration=5.138723167 podStartE2EDuration="13.224597149s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.497807952 +0000 UTC m=+885.773233495" lastFinishedPulling="2025-06-06 09:28:02.583681934 +0000 UTC m=+893.859107477" observedRunningTime="2025-06-06 09:28:05.221013627 +0000 UTC m=+896.496439170" watchObservedRunningTime="2025-06-06 09:28:05.224597149 +0000 UTC m=+896.500022692" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.239612 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq" podStartSLOduration=4.814372786 podStartE2EDuration="13.239595283s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.154572668 +0000 UTC m=+885.429998211" lastFinishedPulling="2025-06-06 09:28:02.579795165 +0000 UTC m=+893.855220708" observedRunningTime="2025-06-06 09:28:05.236529124 +0000 UTC m=+896.511954667" watchObservedRunningTime="2025-06-06 09:28:05.239595283 +0000 UTC m=+896.515020826" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.256408 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-58f798889d-4xw87" podStartSLOduration=5.044112166 podStartE2EDuration="13.256391903s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.351858417 +0000 UTC m=+885.627283960" lastFinishedPulling="2025-06-06 09:28:02.564138154 +0000 UTC m=+893.839563697" observedRunningTime="2025-06-06 09:28:05.25158033 +0000 UTC m=+896.527005873" watchObservedRunningTime="2025-06-06 09:28:05.256391903 +0000 UTC m=+896.531817446" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.269218 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6" podStartSLOduration=5.049971566 podStartE2EDuration="13.26919867s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.351801906 +0000 UTC m=+885.627227449" lastFinishedPulling="2025-06-06 09:28:02.57102901 +0000 UTC m=+893.846454553" observedRunningTime="2025-06-06 09:28:05.265474285 +0000 UTC m=+896.540899828" watchObservedRunningTime="2025-06-06 09:28:05.26919867 +0000 UTC m=+896.544624213" Jun 06 09:28:05 crc kubenswrapper[4911]: I0606 09:28:05.283758 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc" podStartSLOduration=5.260464553 podStartE2EDuration="13.283739863s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.551288541 +0000 UTC m=+885.826714094" lastFinishedPulling="2025-06-06 09:28:02.574563861 +0000 UTC m=+893.849989404" observedRunningTime="2025-06-06 09:28:05.278196181 +0000 UTC m=+896.553621734" 
watchObservedRunningTime="2025-06-06 09:28:05.283739863 +0000 UTC m=+896.559165406" Jun 06 09:28:06 crc kubenswrapper[4911]: I0606 09:28:06.197259 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq" Jun 06 09:28:10 crc kubenswrapper[4911]: I0606 09:28:10.229449 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" event={"ID":"6ae578bf-06e2-4fcd-b272-84216e832cb2","Type":"ContainerStarted","Data":"265d14134899b7eb6e0f685b0f892c6abdb4797a7c26829672eada0ea9b97c26"} Jun 06 09:28:10 crc kubenswrapper[4911]: I0606 09:28:10.231072 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" Jun 06 09:28:10 crc kubenswrapper[4911]: I0606 09:28:10.248961 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" podStartSLOduration=2.922842109 podStartE2EDuration="18.248943681s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.506601447 +0000 UTC m=+885.782026990" lastFinishedPulling="2025-06-06 09:28:09.832703019 +0000 UTC m=+901.108128562" observedRunningTime="2025-06-06 09:28:10.247620047 +0000 UTC m=+901.523045610" watchObservedRunningTime="2025-06-06 09:28:10.248943681 +0000 UTC m=+901.524369224" Jun 06 09:28:12 crc kubenswrapper[4911]: I0606 09:28:12.834289 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-9889b4756-xv9ps" Jun 06 09:28:12 crc kubenswrapper[4911]: I0606 09:28:12.850397 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-b554678df-g2vch" Jun 06 09:28:12 crc kubenswrapper[4911]: I0606 09:28:12.861836 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-97b97479c-j5r77" Jun 06 09:28:12 crc kubenswrapper[4911]: I0606 09:28:12.897066 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5486f4b54f-klhdq" Jun 06 09:28:12 crc kubenswrapper[4911]: I0606 09:28:12.930855 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-7777cf768b-l4tg9" Jun 06 09:28:12 crc kubenswrapper[4911]: I0606 09:28:12.967713 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-68f4bbb747-rsbj6" Jun 06 09:28:12 crc kubenswrapper[4911]: I0606 09:28:12.986580 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5ccbd96f89-fw9ps" Jun 06 09:28:13 crc kubenswrapper[4911]: I0606 09:28:13.039136 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-75b8755b74-8djvm" Jun 06 09:28:13 crc kubenswrapper[4911]: I0606 09:28:13.106236 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-57f4dc9749-jnxvw" Jun 06 09:28:13 crc kubenswrapper[4911]: I0606 09:28:13.142086 4911 scope.go:117] "RemoveContainer" 
containerID="3aa905ce217b286ab5180b74839ea30e62970cb64fa7bbbc0f69dffd85c51aaf" Jun 06 09:28:13 crc kubenswrapper[4911]: I0606 09:28:13.162109 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-664db87fd8-hfzdg" Jun 06 09:28:13 crc kubenswrapper[4911]: I0606 09:28:13.310282 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-58f798889d-4xw87" Jun 06 09:28:13 crc kubenswrapper[4911]: I0606 09:28:13.335286 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-857f9d6b88-ggz4c" Jun 06 09:28:13 crc kubenswrapper[4911]: I0606 09:28:13.342365 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-7d4bbc7f54-rgdz4" Jun 06 09:28:13 crc kubenswrapper[4911]: I0606 09:28:13.390197 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/crc-debug-k6slv"] Jun 06 09:28:13 crc kubenswrapper[4911]: I0606 09:28:13.390457 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/crc-debug-k6slv" podUID="ec8129ee-9f25-4768-9cfd-3319551392fd" containerName="container-00" containerID="cri-o://c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb" gracePeriod=2 Jun 06 09:28:13 crc kubenswrapper[4911]: I0606 09:28:13.396588 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/crc-debug-k6slv"] Jun 06 09:28:13 crc kubenswrapper[4911]: I0606 09:28:13.499493 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-7779c57cf7-s7shc" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.097331 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/crc-debug-k6slv" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.265230 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec8129ee-9f25-4768-9cfd-3319551392fd-host\") pod \"ec8129ee-9f25-4768-9cfd-3319551392fd\" (UID: \"ec8129ee-9f25-4768-9cfd-3319551392fd\") " Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.265491 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrfqn\" (UniqueName: \"kubernetes.io/projected/ec8129ee-9f25-4768-9cfd-3319551392fd-kube-api-access-nrfqn\") pod \"ec8129ee-9f25-4768-9cfd-3319551392fd\" (UID: \"ec8129ee-9f25-4768-9cfd-3319551392fd\") " Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.265595 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ec8129ee-9f25-4768-9cfd-3319551392fd-host" (OuterVolumeSpecName: "host") pod "ec8129ee-9f25-4768-9cfd-3319551392fd" (UID: "ec8129ee-9f25-4768-9cfd-3319551392fd"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.265774 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec8129ee-9f25-4768-9cfd-3319551392fd-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.267360 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws" event={"ID":"08567c0a-fcf6-4ed2-8477-bccd389f2a6d","Type":"ContainerStarted","Data":"4881a1f8a8b587c123b3509bb8a2d25b90c2b43089495e2f62aaf7bf9d536f76"} Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.271037 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" event={"ID":"4ed91cbd-66c2-4144-8770-c1495382976a","Type":"ContainerStarted","Data":"561929299c45269f52804c8facf67390c1ab59842c664cb45688da38f12b437c"} Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.271402 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.272258 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec8129ee-9f25-4768-9cfd-3319551392fd-kube-api-access-nrfqn" (OuterVolumeSpecName: "kube-api-access-nrfqn") pod "ec8129ee-9f25-4768-9cfd-3319551392fd" (UID: "ec8129ee-9f25-4768-9cfd-3319551392fd"). InnerVolumeSpecName "kube-api-access-nrfqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.274068 4911 generic.go:334] "Generic (PLEG): container finished" podID="ec8129ee-9f25-4768-9cfd-3319551392fd" containerID="c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb" exitCode=0 Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.274144 4911 scope.go:117] "RemoveContainer" containerID="c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.274189 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/crc-debug-k6slv" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.276033 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" event={"ID":"dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944","Type":"ContainerStarted","Data":"d921715ca74d32be1a02c573bb74849863082e913fd63a43fa373823087ff766"} Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.276304 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.283425 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" event={"ID":"2b8c5692-2750-401d-bee2-4717f71fc6df","Type":"ContainerStarted","Data":"8e6d6c68ee1ffd8573801520b3159805cf2346348098557b95559fc256c3c4a8"} Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.283629 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.284689 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-67ff8584d-djvws" podStartSLOduration=2.709274387 podStartE2EDuration="21.284674773s" podCreationTimestamp="2025-06-06 09:27:53 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.564104569 +0000 UTC m=+885.839530312" lastFinishedPulling="2025-06-06 09:28:13.139505155 +0000 UTC m=+904.414930698" observedRunningTime="2025-06-06 09:28:14.281908552 +0000 UTC m=+905.557334095" watchObservedRunningTime="2025-06-06 09:28:14.284674773 +0000 UTC m=+905.560100316" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.290409 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" event={"ID":"5bddb4bd-da2f-405e-a72e-3b89c3526010","Type":"ContainerStarted","Data":"6f25e55afa13ed483b0f15ea0c6aa14eab388a77f572fe11817783f411210856"} Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.290643 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.303077 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" podStartSLOduration=4.386271922 podStartE2EDuration="22.303055423s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.554614906 +0000 UTC m=+885.830040449" lastFinishedPulling="2025-06-06 09:28:12.471398407 +0000 UTC m=+903.746823950" observedRunningTime="2025-06-06 09:28:14.298450975 +0000 UTC m=+905.573876528" watchObservedRunningTime="2025-06-06 09:28:14.303055423 +0000 UTC m=+905.578480966" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.307589 4911 scope.go:117] "RemoveContainer" containerID="c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb" Jun 06 09:28:14 crc kubenswrapper[4911]: E0606 09:28:14.308080 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb\": container with ID starting with 
c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb not found: ID does not exist" containerID="c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.308159 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb"} err="failed to get container status \"c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb\": rpc error: code = NotFound desc = could not find container \"c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb\": container with ID starting with c4cf84264fdcabe8ab1788b98ccc28b3bad753eb0ed5520f24b4fd1b2cd843bb not found: ID does not exist" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.318215 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" podStartSLOduration=3.601549789 podStartE2EDuration="22.31819252s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.505066628 +0000 UTC m=+885.780492171" lastFinishedPulling="2025-06-06 09:28:13.221709359 +0000 UTC m=+904.497134902" observedRunningTime="2025-06-06 09:28:14.313554152 +0000 UTC m=+905.588979705" watchObservedRunningTime="2025-06-06 09:28:14.31819252 +0000 UTC m=+905.593618063" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.365884 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" podStartSLOduration=4.449004496 podStartE2EDuration="22.365868379s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.577279986 +0000 UTC m=+885.852705529" lastFinishedPulling="2025-06-06 09:28:12.494143869 +0000 UTC m=+903.769569412" observedRunningTime="2025-06-06 09:28:14.361915208 +0000 UTC m=+905.637340751" watchObservedRunningTime="2025-06-06 09:28:14.365868379 +0000 UTC m=+905.641293922" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.367034 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" podStartSLOduration=2.825827296 podStartE2EDuration="22.367028269s" podCreationTimestamp="2025-06-06 09:27:52 +0000 UTC" firstStartedPulling="2025-06-06 09:27:54.566822589 +0000 UTC m=+885.842248132" lastFinishedPulling="2025-06-06 09:28:14.108023572 +0000 UTC m=+905.383449105" observedRunningTime="2025-06-06 09:28:14.336802247 +0000 UTC m=+905.612227790" watchObservedRunningTime="2025-06-06 09:28:14.367028269 +0000 UTC m=+905.642453802" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.368162 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrfqn\" (UniqueName: \"kubernetes.io/projected/ec8129ee-9f25-4768-9cfd-3319551392fd-kube-api-access-nrfqn\") on node \"crc\" DevicePath \"\"" Jun 06 09:28:14 crc kubenswrapper[4911]: I0606 09:28:14.746028 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-5b4ccb8c4-t77qw" Jun 06 09:28:15 crc kubenswrapper[4911]: I0606 09:28:15.960123 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec8129ee-9f25-4768-9cfd-3319551392fd" path="/var/lib/kubelet/pods/ec8129ee-9f25-4768-9cfd-3319551392fd/volumes" Jun 06 09:28:23 crc kubenswrapper[4911]: I0606 09:28:23.081323 4911 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5df6744645-slsxs" Jun 06 09:28:23 crc kubenswrapper[4911]: I0606 09:28:23.270010 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9f78645d5-6p5gr" Jun 06 09:28:23 crc kubenswrapper[4911]: I0606 09:28:23.516360 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-884d667-t6d6w" Jun 06 09:28:23 crc kubenswrapper[4911]: I0606 09:28:23.536044 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-6db7bffb67-stlv6" Jun 06 09:28:23 crc kubenswrapper[4911]: I0606 09:28:23.988714 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn" Jun 06 09:28:24 crc kubenswrapper[4911]: I0606 09:28:24.299820 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:28:24 crc kubenswrapper[4911]: I0606 09:28:24.299886 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.418949 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-546489d6df-7hqx2"] Jun 06 09:28:37 crc kubenswrapper[4911]: E0606 09:28:37.419810 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec8129ee-9f25-4768-9cfd-3319551392fd" containerName="container-00" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.419823 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec8129ee-9f25-4768-9cfd-3319551392fd" containerName="container-00" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.419960 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec8129ee-9f25-4768-9cfd-3319551392fd" containerName="container-00" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.420835 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-546489d6df-7hqx2" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.426368 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.426807 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-rd76g" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.436275 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-546489d6df-7hqx2"] Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.483643 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7566756bf-8brnr"] Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.485174 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.490232 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.499555 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7566756bf-8brnr"] Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.596329 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd4hh\" (UniqueName: \"kubernetes.io/projected/9ab34f3d-4a2d-42a5-8bd4-877893166667-kube-api-access-vd4hh\") pod \"dnsmasq-dns-7566756bf-8brnr\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.596427 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-config\") pod \"dnsmasq-dns-7566756bf-8brnr\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.596456 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjrm7\" (UniqueName: \"kubernetes.io/projected/fec3385d-41ae-41d2-80d4-4af58a656162-kube-api-access-sjrm7\") pod \"dnsmasq-dns-546489d6df-7hqx2\" (UID: \"fec3385d-41ae-41d2-80d4-4af58a656162\") " pod="openstack/dnsmasq-dns-546489d6df-7hqx2" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.596491 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-dns-svc\") pod \"dnsmasq-dns-7566756bf-8brnr\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.596524 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fec3385d-41ae-41d2-80d4-4af58a656162-config\") pod \"dnsmasq-dns-546489d6df-7hqx2\" (UID: \"fec3385d-41ae-41d2-80d4-4af58a656162\") " pod="openstack/dnsmasq-dns-546489d6df-7hqx2" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.697865 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd4hh\" (UniqueName: \"kubernetes.io/projected/9ab34f3d-4a2d-42a5-8bd4-877893166667-kube-api-access-vd4hh\") pod \"dnsmasq-dns-7566756bf-8brnr\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.697948 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-config\") pod \"dnsmasq-dns-7566756bf-8brnr\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.697982 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjrm7\" (UniqueName: \"kubernetes.io/projected/fec3385d-41ae-41d2-80d4-4af58a656162-kube-api-access-sjrm7\") pod \"dnsmasq-dns-546489d6df-7hqx2\" (UID: \"fec3385d-41ae-41d2-80d4-4af58a656162\") " 
pod="openstack/dnsmasq-dns-546489d6df-7hqx2" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.698023 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-dns-svc\") pod \"dnsmasq-dns-7566756bf-8brnr\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.698056 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fec3385d-41ae-41d2-80d4-4af58a656162-config\") pod \"dnsmasq-dns-546489d6df-7hqx2\" (UID: \"fec3385d-41ae-41d2-80d4-4af58a656162\") " pod="openstack/dnsmasq-dns-546489d6df-7hqx2" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.699209 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fec3385d-41ae-41d2-80d4-4af58a656162-config\") pod \"dnsmasq-dns-546489d6df-7hqx2\" (UID: \"fec3385d-41ae-41d2-80d4-4af58a656162\") " pod="openstack/dnsmasq-dns-546489d6df-7hqx2" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.700269 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-config\") pod \"dnsmasq-dns-7566756bf-8brnr\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.701062 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-dns-svc\") pod \"dnsmasq-dns-7566756bf-8brnr\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.720967 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd4hh\" (UniqueName: \"kubernetes.io/projected/9ab34f3d-4a2d-42a5-8bd4-877893166667-kube-api-access-vd4hh\") pod \"dnsmasq-dns-7566756bf-8brnr\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.802639 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:37 crc kubenswrapper[4911]: I0606 09:28:37.865012 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjrm7\" (UniqueName: \"kubernetes.io/projected/fec3385d-41ae-41d2-80d4-4af58a656162-kube-api-access-sjrm7\") pod \"dnsmasq-dns-546489d6df-7hqx2\" (UID: \"fec3385d-41ae-41d2-80d4-4af58a656162\") " pod="openstack/dnsmasq-dns-546489d6df-7hqx2" Jun 06 09:28:38 crc kubenswrapper[4911]: I0606 09:28:38.052270 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-546489d6df-7hqx2" Jun 06 09:28:38 crc kubenswrapper[4911]: I0606 09:28:38.216867 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7566756bf-8brnr"] Jun 06 09:28:38 crc kubenswrapper[4911]: W0606 09:28:38.226406 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ab34f3d_4a2d_42a5_8bd4_877893166667.slice/crio-22c80d36cea9db2616c5336a09878963441d33f81eaab3b207952c20b545ece2 WatchSource:0}: Error finding container 22c80d36cea9db2616c5336a09878963441d33f81eaab3b207952c20b545ece2: Status 404 returned error can't find the container with id 22c80d36cea9db2616c5336a09878963441d33f81eaab3b207952c20b545ece2 Jun 06 09:28:38 crc kubenswrapper[4911]: I0606 09:28:38.448384 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7566756bf-8brnr" event={"ID":"9ab34f3d-4a2d-42a5-8bd4-877893166667","Type":"ContainerStarted","Data":"22c80d36cea9db2616c5336a09878963441d33f81eaab3b207952c20b545ece2"} Jun 06 09:28:38 crc kubenswrapper[4911]: I0606 09:28:38.579317 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-546489d6df-7hqx2"] Jun 06 09:28:38 crc kubenswrapper[4911]: W0606 09:28:38.586313 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfec3385d_41ae_41d2_80d4_4af58a656162.slice/crio-c92e8950975863fd294aeca96ee0fcf0414b8957a6f10ff0e61a07ce2c8c2ace WatchSource:0}: Error finding container c92e8950975863fd294aeca96ee0fcf0414b8957a6f10ff0e61a07ce2c8c2ace: Status 404 returned error can't find the container with id c92e8950975863fd294aeca96ee0fcf0414b8957a6f10ff0e61a07ce2c8c2ace Jun 06 09:28:39 crc kubenswrapper[4911]: I0606 09:28:39.457976 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-546489d6df-7hqx2" event={"ID":"fec3385d-41ae-41d2-80d4-4af58a656162","Type":"ContainerStarted","Data":"c92e8950975863fd294aeca96ee0fcf0414b8957a6f10ff0e61a07ce2c8c2ace"} Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.542101 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-546489d6df-7hqx2"] Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.573337 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5544c68b5-8q4nx"] Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.579043 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.612657 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5544c68b5-8q4nx"] Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.742591 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-dns-svc\") pod \"dnsmasq-dns-5544c68b5-8q4nx\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.742662 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9887\" (UniqueName: \"kubernetes.io/projected/94fcf722-4611-47e8-b20f-c57d35e8ecb7-kube-api-access-c9887\") pod \"dnsmasq-dns-5544c68b5-8q4nx\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.742740 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-config\") pod \"dnsmasq-dns-5544c68b5-8q4nx\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.832383 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7566756bf-8brnr"] Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.851542 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-dns-svc\") pod \"dnsmasq-dns-5544c68b5-8q4nx\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.851619 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9887\" (UniqueName: \"kubernetes.io/projected/94fcf722-4611-47e8-b20f-c57d35e8ecb7-kube-api-access-c9887\") pod \"dnsmasq-dns-5544c68b5-8q4nx\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.851675 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-config\") pod \"dnsmasq-dns-5544c68b5-8q4nx\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.852917 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-config\") pod \"dnsmasq-dns-5544c68b5-8q4nx\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.853729 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-dns-svc\") pod \"dnsmasq-dns-5544c68b5-8q4nx\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.875023 4911 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d5db84f4f-667cv"] Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.880748 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.886406 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d5db84f4f-667cv"] Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.888485 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9887\" (UniqueName: \"kubernetes.io/projected/94fcf722-4611-47e8-b20f-c57d35e8ecb7-kube-api-access-c9887\") pod \"dnsmasq-dns-5544c68b5-8q4nx\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.908079 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.952639 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc8dg\" (UniqueName: \"kubernetes.io/projected/f904c35e-e8d4-40be-97b0-3c897429628b-kube-api-access-jc8dg\") pod \"dnsmasq-dns-d5db84f4f-667cv\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.953191 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-dns-svc\") pod \"dnsmasq-dns-d5db84f4f-667cv\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:40 crc kubenswrapper[4911]: I0606 09:28:40.953272 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-config\") pod \"dnsmasq-dns-d5db84f4f-667cv\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.054833 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-dns-svc\") pod \"dnsmasq-dns-d5db84f4f-667cv\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.054919 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-config\") pod \"dnsmasq-dns-d5db84f4f-667cv\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.055014 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc8dg\" (UniqueName: \"kubernetes.io/projected/f904c35e-e8d4-40be-97b0-3c897429628b-kube-api-access-jc8dg\") pod \"dnsmasq-dns-d5db84f4f-667cv\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.059194 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-dns-svc\") pod \"dnsmasq-dns-d5db84f4f-667cv\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.059219 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-config\") pod \"dnsmasq-dns-d5db84f4f-667cv\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.084512 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc8dg\" (UniqueName: \"kubernetes.io/projected/f904c35e-e8d4-40be-97b0-3c897429628b-kube-api-access-jc8dg\") pod \"dnsmasq-dns-d5db84f4f-667cv\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.313523 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.422746 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5544c68b5-8q4nx"] Jun 06 09:28:41 crc kubenswrapper[4911]: W0606 09:28:41.432734 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94fcf722_4611_47e8_b20f_c57d35e8ecb7.slice/crio-70704d9a240a011440909292ba2c9ac26650e0cd8196b60b51939e40b3ccb58d WatchSource:0}: Error finding container 70704d9a240a011440909292ba2c9ac26650e0cd8196b60b51939e40b3ccb58d: Status 404 returned error can't find the container with id 70704d9a240a011440909292ba2c9ac26650e0cd8196b60b51939e40b3ccb58d Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.510264 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" event={"ID":"94fcf722-4611-47e8-b20f-c57d35e8ecb7","Type":"ContainerStarted","Data":"70704d9a240a011440909292ba2c9ac26650e0cd8196b60b51939e40b3ccb58d"} Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.703288 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.704501 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.706146 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.707217 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.707471 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.707548 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-clfpc" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.707746 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.707817 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.707895 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.727327 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.790221 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d5db84f4f-667cv"] Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.866064 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.866169 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.866200 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.866225 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-server-conf\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.866244 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/abf307d7-9aa9-4d2f-9943-e3a085568096-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.866268 4911 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.866291 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44vbj\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-kube-api-access-44vbj\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.866317 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-config-data\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.866332 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/abf307d7-9aa9-4d2f-9943-e3a085568096-pod-info\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.866353 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.866370 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.967872 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-server-conf\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.967937 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/abf307d7-9aa9-4d2f-9943-e3a085568096-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.967963 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.967987 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44vbj\" (UniqueName: 
\"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-kube-api-access-44vbj\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.968006 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-config-data\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.968023 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/abf307d7-9aa9-4d2f-9943-e3a085568096-pod-info\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.968042 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.968062 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.968148 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.968177 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.968202 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.969299 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.969450 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 
09:28:41.970080 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.970539 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.970580 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-server-conf\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.970915 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-config-data\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.975959 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/abf307d7-9aa9-4d2f-9943-e3a085568096-pod-info\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.975957 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/abf307d7-9aa9-4d2f-9943-e3a085568096-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.976206 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.982409 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.989361 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44vbj\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-kube-api-access-44vbj\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.994461 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.995733 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.997801 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jun 06 09:28:41 crc kubenswrapper[4911]: I0606 09:28:41.998002 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:41.999564 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:41.999739 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:41.999849 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-f7f6b" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.000041 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.000500 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.008945 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.052707 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " pod="openstack/rabbitmq-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.171739 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v99v4\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-kube-api-access-v99v4\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.171817 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.171884 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.171928 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.171946 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" 
(UniqueName: \"kubernetes.io/secret/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.171968 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.172165 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.172233 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.172316 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.172396 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.172501 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.274949 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.275059 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.275131 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.275161 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.275192 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.275223 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v99v4\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-kube-api-access-v99v4\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.275251 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.275292 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.275327 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.275345 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.275366 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.275466 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") device 
mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.276607 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.277986 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.278132 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.278422 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.278725 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.280063 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.281264 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.282341 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.284751 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.298268 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.299108 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v99v4\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-kube-api-access-v99v4\") pod \"rabbitmq-cell1-server-0\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.345586 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.391298 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:28:42 crc kubenswrapper[4911]: I0606 09:28:42.519379 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" event={"ID":"f904c35e-e8d4-40be-97b0-3c897429628b","Type":"ContainerStarted","Data":"d9f3d120cb9b2d1aea5964703d8cba56d80160b8a3ff2e1cdeacd25baa3e329f"} Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.763168 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.765004 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.767356 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-tjkwd" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.767478 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.767663 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.767681 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.767841 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.778069 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.781671 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.888844 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.891830 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.899296 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.899610 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-n5pnf" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.899781 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.903504 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.905763 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.946438 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.946496 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-config-data-default\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.946527 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.946564 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5rgx\" (UniqueName: \"kubernetes.io/projected/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-kube-api-access-c5rgx\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.946746 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.946796 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.946832 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") 
pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.946889 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:44 crc kubenswrapper[4911]: I0606 09:28:44.946935 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-secrets\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.048863 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc9662e0-ac5c-46b2-b608-945afee3f990-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.048920 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fc9662e0-ac5c-46b2-b608-945afee3f990-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.048951 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9662e0-ac5c-46b2-b608-945afee3f990-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.048971 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fc9662e0-ac5c-46b2-b608-945afee3f990-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.049047 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.049069 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.049115 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " 
pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.049139 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.049161 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fc9662e0-ac5c-46b2-b608-945afee3f990-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.049178 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-secrets\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.049200 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.049260 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/fc9662e0-ac5c-46b2-b608-945afee3f990-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.049290 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.049315 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbncn\" (UniqueName: \"kubernetes.io/projected/fc9662e0-ac5c-46b2-b608-945afee3f990-kube-api-access-sbncn\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.049711 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.050353 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-config-data-default\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.050391 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.050424 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc9662e0-ac5c-46b2-b608-945afee3f990-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.050445 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5rgx\" (UniqueName: \"kubernetes.io/projected/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-kube-api-access-c5rgx\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.050576 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.051343 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-config-data-default\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.051381 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.055199 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.057550 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.057566 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.068333 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-secrets\") pod \"openstack-galera-0\" (UID: 
\"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.077773 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5rgx\" (UniqueName: \"kubernetes.io/projected/4a2cb894-6a4a-4a24-aedb-b3dabe082a4b-kube-api-access-c5rgx\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.090207 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b\") " pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.130744 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.131755 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.133807 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-6d5rr" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.134048 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.135014 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.142043 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.151687 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc9662e0-ac5c-46b2-b608-945afee3f990-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.151750 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc9662e0-ac5c-46b2-b608-945afee3f990-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.151779 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fc9662e0-ac5c-46b2-b608-945afee3f990-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.151823 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9662e0-ac5c-46b2-b608-945afee3f990-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.152896 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/fc9662e0-ac5c-46b2-b608-945afee3f990-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.152992 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fc9662e0-ac5c-46b2-b608-945afee3f990-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.153081 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fc9662e0-ac5c-46b2-b608-945afee3f990-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.153856 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.153900 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/fc9662e0-ac5c-46b2-b608-945afee3f990-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.153944 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc9662e0-ac5c-46b2-b608-945afee3f990-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.153987 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbncn\" (UniqueName: \"kubernetes.io/projected/fc9662e0-ac5c-46b2-b608-945afee3f990-kube-api-access-sbncn\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.154173 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fc9662e0-ac5c-46b2-b608-945afee3f990-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.154230 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.154254 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fc9662e0-ac5c-46b2-b608-945afee3f990-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: 
\"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.159343 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc9662e0-ac5c-46b2-b608-945afee3f990-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.159513 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9662e0-ac5c-46b2-b608-945afee3f990-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.160558 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/fc9662e0-ac5c-46b2-b608-945afee3f990-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.175823 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbncn\" (UniqueName: \"kubernetes.io/projected/fc9662e0-ac5c-46b2-b608-945afee3f990-kube-api-access-sbncn\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.180388 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"fc9662e0-ac5c-46b2-b608-945afee3f990\") " pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.210736 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.255888 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.255955 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.255999 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-kolla-config\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.256452 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-config-data\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.256562 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clrg9\" (UniqueName: \"kubernetes.io/projected/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-kube-api-access-clrg9\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.358744 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.358799 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.359647 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-kolla-config\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.359734 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-config-data\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.359769 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clrg9\" 
(UniqueName: \"kubernetes.io/projected/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-kube-api-access-clrg9\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.360132 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-kolla-config\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.360461 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-config-data\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.363394 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.364743 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.382724 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clrg9\" (UniqueName: \"kubernetes.io/projected/7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20-kube-api-access-clrg9\") pod \"memcached-0\" (UID: \"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20\") " pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.387626 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.534759 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.848512 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.853795 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.857490 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-2j65b" Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.871200 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jun 06 09:28:45 crc kubenswrapper[4911]: I0606 09:28:45.970255 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snx4r\" (UniqueName: \"kubernetes.io/projected/3a014170-9bee-45a3-af73-3a5c0418de93-kube-api-access-snx4r\") pod \"kube-state-metrics-0\" (UID: \"3a014170-9bee-45a3-af73-3a5c0418de93\") " pod="openstack/kube-state-metrics-0" Jun 06 09:28:46 crc kubenswrapper[4911]: I0606 09:28:46.072519 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snx4r\" (UniqueName: \"kubernetes.io/projected/3a014170-9bee-45a3-af73-3a5c0418de93-kube-api-access-snx4r\") pod \"kube-state-metrics-0\" (UID: \"3a014170-9bee-45a3-af73-3a5c0418de93\") " pod="openstack/kube-state-metrics-0" Jun 06 09:28:46 crc kubenswrapper[4911]: I0606 09:28:46.095349 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snx4r\" (UniqueName: \"kubernetes.io/projected/3a014170-9bee-45a3-af73-3a5c0418de93-kube-api-access-snx4r\") pod \"kube-state-metrics-0\" (UID: \"3a014170-9bee-45a3-af73-3a5c0418de93\") " pod="openstack/kube-state-metrics-0" Jun 06 09:28:46 crc kubenswrapper[4911]: I0606 09:28:46.193241 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.288745 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4dlv9"] Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.291222 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.297159 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-npmps" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.297456 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.297591 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.303635 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4dlv9"] Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.312104 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-jjkvl"] Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.314175 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328387 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a8407a6-611d-477c-8530-9c1728797994-var-run\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328434 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a8407a6-611d-477c-8530-9c1728797994-combined-ca-bundle\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328461 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a8407a6-611d-477c-8530-9c1728797994-var-log-ovn\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328500 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbb5v\" (UniqueName: \"kubernetes.io/projected/5a8407a6-611d-477c-8530-9c1728797994-kube-api-access-zbb5v\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328530 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/372d84e8-5035-48ca-9ee9-676bf64886c9-scripts\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328555 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a8407a6-611d-477c-8530-9c1728797994-scripts\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328581 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-var-log\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328598 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-etc-ovs\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328666 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxtgx\" (UniqueName: \"kubernetes.io/projected/372d84e8-5035-48ca-9ee9-676bf64886c9-kube-api-access-lxtgx\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " 
pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328685 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a8407a6-611d-477c-8530-9c1728797994-var-run-ovn\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328708 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-var-run\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328723 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-var-lib\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.328752 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a8407a6-611d-477c-8530-9c1728797994-ovn-controller-tls-certs\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.329162 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-jjkvl"] Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.429874 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-var-run\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.429996 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-var-lib\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430042 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a8407a6-611d-477c-8530-9c1728797994-ovn-controller-tls-certs\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430132 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a8407a6-611d-477c-8530-9c1728797994-var-run\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430156 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a8407a6-611d-477c-8530-9c1728797994-combined-ca-bundle\") pod \"ovn-controller-4dlv9\" (UID: 
\"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430186 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a8407a6-611d-477c-8530-9c1728797994-var-log-ovn\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430224 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbb5v\" (UniqueName: \"kubernetes.io/projected/5a8407a6-611d-477c-8530-9c1728797994-kube-api-access-zbb5v\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430254 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/372d84e8-5035-48ca-9ee9-676bf64886c9-scripts\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430282 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a8407a6-611d-477c-8530-9c1728797994-scripts\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430308 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-var-log\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430330 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-etc-ovs\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430377 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxtgx\" (UniqueName: \"kubernetes.io/projected/372d84e8-5035-48ca-9ee9-676bf64886c9-kube-api-access-lxtgx\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430401 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a8407a6-611d-477c-8530-9c1728797994-var-run-ovn\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430658 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-var-run\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430710 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5a8407a6-611d-477c-8530-9c1728797994-var-run-ovn\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.430830 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5a8407a6-611d-477c-8530-9c1728797994-var-run\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.431022 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-var-lib\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.431208 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5a8407a6-611d-477c-8530-9c1728797994-var-log-ovn\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.432475 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-etc-ovs\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.432559 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/372d84e8-5035-48ca-9ee9-676bf64886c9-var-log\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.436037 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a8407a6-611d-477c-8530-9c1728797994-scripts\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.436079 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/372d84e8-5035-48ca-9ee9-676bf64886c9-scripts\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.439191 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a8407a6-611d-477c-8530-9c1728797994-ovn-controller-tls-certs\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.445406 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a8407a6-611d-477c-8530-9c1728797994-combined-ca-bundle\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.447931 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbb5v\" (UniqueName: \"kubernetes.io/projected/5a8407a6-611d-477c-8530-9c1728797994-kube-api-access-zbb5v\") pod \"ovn-controller-4dlv9\" (UID: \"5a8407a6-611d-477c-8530-9c1728797994\") " pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.448504 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxtgx\" (UniqueName: \"kubernetes.io/projected/372d84e8-5035-48ca-9ee9-676bf64886c9-kube-api-access-lxtgx\") pod \"ovn-controller-ovs-jjkvl\" (UID: \"372d84e8-5035-48ca-9ee9-676bf64886c9\") " pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.615204 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4dlv9" Jun 06 09:28:49 crc kubenswrapper[4911]: I0606 09:28:49.641359 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.730398 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.732448 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.734692 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.734904 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.736927 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-jfxjx" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.739117 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.812850 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.866954 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.867020 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.867041 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.867071 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.867158 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mtjt\" (UniqueName: \"kubernetes.io/projected/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-kube-api-access-4mtjt\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.968789 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.968895 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.968927 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.968971 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.969047 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mtjt\" (UniqueName: \"kubernetes.io/projected/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-kube-api-access-4mtjt\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.969261 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.970031 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.976137 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: 
I0606 09:28:51.976361 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.985963 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mtjt\" (UniqueName: \"kubernetes.io/projected/ececc27b-f281-4e57-9c46-0cbe3a1ab2d7-kube-api-access-4mtjt\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:51 crc kubenswrapper[4911]: I0606 09:28:51.992597 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7\") " pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:52 crc kubenswrapper[4911]: I0606 09:28:52.056173 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jun 06 09:28:52 crc kubenswrapper[4911]: I0606 09:28:52.563283 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jun 06 09:28:52 crc kubenswrapper[4911]: I0606 09:28:52.628890 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abf307d7-9aa9-4d2f-9943-e3a085568096","Type":"ContainerStarted","Data":"58588c900b6cd6229f07a204fe03e3a260511e1e4844bce13cd9c4eb378f8f04"} Jun 06 09:28:52 crc kubenswrapper[4911]: I0606 09:28:52.636084 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fc9662e0-ac5c-46b2-b608-945afee3f990","Type":"ContainerStarted","Data":"e4f68756da805df680d7acadfec22cf3b9b5766517e901b5917bac667b0dde15"} Jun 06 09:28:52 crc kubenswrapper[4911]: I0606 09:28:52.761156 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jun 06 09:28:52 crc kubenswrapper[4911]: I0606 09:28:52.770448 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jun 06 09:28:52 crc kubenswrapper[4911]: W0606 09:28:52.819544 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7fbe9d9e_a221_4ee6_9fd4_f5841ba38f20.slice/crio-9cc1f1abd0e2373ceec8691bb2ecd0920ca5d32238987c64572b07043c136ef4 WatchSource:0}: Error finding container 9cc1f1abd0e2373ceec8691bb2ecd0920ca5d32238987c64572b07043c136ef4: Status 404 returned error can't find the container with id 9cc1f1abd0e2373ceec8691bb2ecd0920ca5d32238987c64572b07043c136ef4 Jun 06 09:28:52 crc kubenswrapper[4911]: W0606 09:28:52.819914 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a6b5cde_8e4b_4433_a3b3_4f79eb0754b4.slice/crio-1239103cc83fefa92a7944200a759a97a358828d5f69d6891e4b31aeede6a5c6 WatchSource:0}: Error finding container 1239103cc83fefa92a7944200a759a97a358828d5f69d6891e4b31aeede6a5c6: Status 404 returned error can't find the container with id 1239103cc83fefa92a7944200a759a97a358828d5f69d6891e4b31aeede6a5c6 Jun 06 09:28:52 crc kubenswrapper[4911]: I0606 09:28:52.894278 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4dlv9"] Jun 06 09:28:52 crc kubenswrapper[4911]: 
I0606 09:28:52.901633 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jun 06 09:28:52 crc kubenswrapper[4911]: W0606 09:28:52.962551 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a014170_9bee_45a3_af73_3a5c0418de93.slice/crio-5e819ab020d5fdf3b89541e988928d64f2e89c2f9a5f88337723f83989c91e1a WatchSource:0}: Error finding container 5e819ab020d5fdf3b89541e988928d64f2e89c2f9a5f88337723f83989c91e1a: Status 404 returned error can't find the container with id 5e819ab020d5fdf3b89541e988928d64f2e89c2f9a5f88337723f83989c91e1a Jun 06 09:28:52 crc kubenswrapper[4911]: I0606 09:28:52.987341 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jun 06 09:28:53 crc kubenswrapper[4911]: W0606 09:28:53.029080 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podececc27b_f281_4e57_9c46_0cbe3a1ab2d7.slice/crio-be00afde80a563bbdeec5d5bfc4d56684748ed981b5588f0c2387cf1001c088f WatchSource:0}: Error finding container be00afde80a563bbdeec5d5bfc4d56684748ed981b5588f0c2387cf1001c088f: Status 404 returned error can't find the container with id be00afde80a563bbdeec5d5bfc4d56684748ed981b5588f0c2387cf1001c088f Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.055069 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jun 06 09:28:53 crc kubenswrapper[4911]: W0606 09:28:53.060872 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a2cb894_6a4a_4a24_aedb_b3dabe082a4b.slice/crio-1cbee34f1db5f1efebea0ff6902f58c739cf13c105d12461e1d103130ce97b04 WatchSource:0}: Error finding container 1cbee34f1db5f1efebea0ff6902f58c739cf13c105d12461e1d103130ce97b04: Status 404 returned error can't find the container with id 1cbee34f1db5f1efebea0ff6902f58c739cf13c105d12461e1d103130ce97b04 Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.086225 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-jjkvl"] Jun 06 09:28:53 crc kubenswrapper[4911]: W0606 09:28:53.089836 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod372d84e8_5035_48ca_9ee9_676bf64886c9.slice/crio-9bd33ba00d684f0702df330ad5ad9a46f34e3cf0a0f11008a789a6dfa74a294c WatchSource:0}: Error finding container 9bd33ba00d684f0702df330ad5ad9a46f34e3cf0a0f11008a789a6dfa74a294c: Status 404 returned error can't find the container with id 9bd33ba00d684f0702df330ad5ad9a46f34e3cf0a0f11008a789a6dfa74a294c Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.325761 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.326927 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.328893 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-82df2" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.329807 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.329947 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.334183 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.431213 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5544c68b5-8q4nx"] Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.450926 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-697f967d49-t9lmx"] Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.452708 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.454910 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.473041 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-697f967d49-t9lmx"] Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.500975 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42b9b63b-5112-46d4-98d8-01e75d78b84f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.501398 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42b9b63b-5112-46d4-98d8-01e75d78b84f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.501533 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2kjs\" (UniqueName: \"kubernetes.io/projected/42b9b63b-5112-46d4-98d8-01e75d78b84f-kube-api-access-h2kjs\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.501724 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.501882 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/42b9b63b-5112-46d4-98d8-01e75d78b84f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.603359 
4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42b9b63b-5112-46d4-98d8-01e75d78b84f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.603418 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-dns-svc\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.603448 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2kjs\" (UniqueName: \"kubernetes.io/projected/42b9b63b-5112-46d4-98d8-01e75d78b84f-kube-api-access-h2kjs\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.603476 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.603550 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/42b9b63b-5112-46d4-98d8-01e75d78b84f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.603696 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvtk5\" (UniqueName: \"kubernetes.io/projected/fd3241c2-4261-4525-b5dd-9a9711232726-kube-api-access-mvtk5\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.604240 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.604880 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42b9b63b-5112-46d4-98d8-01e75d78b84f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.605420 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-config\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.605496 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/42b9b63b-5112-46d4-98d8-01e75d78b84f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.605595 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-ovsdbserver-nb\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.610806 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/42b9b63b-5112-46d4-98d8-01e75d78b84f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.613367 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42b9b63b-5112-46d4-98d8-01e75d78b84f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.622679 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2kjs\" (UniqueName: \"kubernetes.io/projected/42b9b63b-5112-46d4-98d8-01e75d78b84f-kube-api-access-h2kjs\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.629014 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"42b9b63b-5112-46d4-98d8-01e75d78b84f\") " pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.648590 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.658116 4911 generic.go:334] "Generic (PLEG): container finished" podID="94fcf722-4611-47e8-b20f-c57d35e8ecb7" containerID="5c4dc630afc6f6d00bfbfc7829615022d0b7f754323120d14ee00c29dda76ffd" exitCode=0 Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.658199 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" event={"ID":"94fcf722-4611-47e8-b20f-c57d35e8ecb7","Type":"ContainerDied","Data":"5c4dc630afc6f6d00bfbfc7829615022d0b7f754323120d14ee00c29dda76ffd"} Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.659543 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4dlv9" event={"ID":"5a8407a6-611d-477c-8530-9c1728797994","Type":"ContainerStarted","Data":"05f1c34bfda2d4d26155f1d6bf2d3b4f53da8e7acae0bc054b49113339be8ca1"} Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.661252 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4","Type":"ContainerStarted","Data":"1239103cc83fefa92a7944200a759a97a358828d5f69d6891e4b31aeede6a5c6"} Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.663366 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20","Type":"ContainerStarted","Data":"9cc1f1abd0e2373ceec8691bb2ecd0920ca5d32238987c64572b07043c136ef4"} Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.669040 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3a014170-9bee-45a3-af73-3a5c0418de93","Type":"ContainerStarted","Data":"5e819ab020d5fdf3b89541e988928d64f2e89c2f9a5f88337723f83989c91e1a"} Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.686671 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jjkvl" event={"ID":"372d84e8-5035-48ca-9ee9-676bf64886c9","Type":"ContainerStarted","Data":"9bd33ba00d684f0702df330ad5ad9a46f34e3cf0a0f11008a789a6dfa74a294c"} Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.699288 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7","Type":"ContainerStarted","Data":"be00afde80a563bbdeec5d5bfc4d56684748ed981b5588f0c2387cf1001c088f"} Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.709262 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvtk5\" (UniqueName: \"kubernetes.io/projected/fd3241c2-4261-4525-b5dd-9a9711232726-kube-api-access-mvtk5\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.709320 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-config\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.709705 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-ovsdbserver-nb\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: 
\"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.709764 4911 generic.go:334] "Generic (PLEG): container finished" podID="9ab34f3d-4a2d-42a5-8bd4-877893166667" containerID="08bc6c67f75741e3ed803c013053566587e37397f18a2c4e2268f7da9241052a" exitCode=0 Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.709825 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7566756bf-8brnr" event={"ID":"9ab34f3d-4a2d-42a5-8bd4-877893166667","Type":"ContainerDied","Data":"08bc6c67f75741e3ed803c013053566587e37397f18a2c4e2268f7da9241052a"} Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.709944 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-dns-svc\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.711766 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-config\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.713456 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-ovsdbserver-nb\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.713759 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-dns-svc\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.716794 4911 generic.go:334] "Generic (PLEG): container finished" podID="f904c35e-e8d4-40be-97b0-3c897429628b" containerID="ee430fb75688c01f8ec103d9ce71c5e541f803ed5c46fa7e651c71f75edb7755" exitCode=0 Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.716836 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" event={"ID":"f904c35e-e8d4-40be-97b0-3c897429628b","Type":"ContainerDied","Data":"ee430fb75688c01f8ec103d9ce71c5e541f803ed5c46fa7e651c71f75edb7755"} Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.718904 4911 generic.go:334] "Generic (PLEG): container finished" podID="fec3385d-41ae-41d2-80d4-4af58a656162" containerID="2a170211f91f2ee17c3d39ba4fbc7f86eb1aa6dea2e143fc74f63de0a1c1cbda" exitCode=0 Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.718973 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-546489d6df-7hqx2" event={"ID":"fec3385d-41ae-41d2-80d4-4af58a656162","Type":"ContainerDied","Data":"2a170211f91f2ee17c3d39ba4fbc7f86eb1aa6dea2e143fc74f63de0a1c1cbda"} Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.720445 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" 
event={"ID":"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b","Type":"ContainerStarted","Data":"1cbee34f1db5f1efebea0ff6902f58c739cf13c105d12461e1d103130ce97b04"} Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.731296 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvtk5\" (UniqueName: \"kubernetes.io/projected/fd3241c2-4261-4525-b5dd-9a9711232726-kube-api-access-mvtk5\") pod \"dnsmasq-dns-697f967d49-t9lmx\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:53 crc kubenswrapper[4911]: I0606 09:28:53.795766 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.242017 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jun 06 09:28:54 crc kubenswrapper[4911]: W0606 09:28:54.285588 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42b9b63b_5112_46d4_98d8_01e75d78b84f.slice/crio-ccd2f11d758ee253b4ba7bbbf0da75882eaa8e683c5c1e5e8885ccdf46b82819 WatchSource:0}: Error finding container ccd2f11d758ee253b4ba7bbbf0da75882eaa8e683c5c1e5e8885ccdf46b82819: Status 404 returned error can't find the container with id ccd2f11d758ee253b4ba7bbbf0da75882eaa8e683c5c1e5e8885ccdf46b82819 Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.300337 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.300581 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.300629 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.301308 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7e9cf2d10460184da893979bc2c9b0439be07aa529d8fb6fe4706920d07685c7"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.301363 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://7e9cf2d10460184da893979bc2c9b0439be07aa529d8fb6fe4706920d07685c7" gracePeriod=600 Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.312857 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-546489d6df-7hqx2" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.316818 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.430026 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-config\") pod \"9ab34f3d-4a2d-42a5-8bd4-877893166667\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.430518 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjrm7\" (UniqueName: \"kubernetes.io/projected/fec3385d-41ae-41d2-80d4-4af58a656162-kube-api-access-sjrm7\") pod \"fec3385d-41ae-41d2-80d4-4af58a656162\" (UID: \"fec3385d-41ae-41d2-80d4-4af58a656162\") " Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.430557 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fec3385d-41ae-41d2-80d4-4af58a656162-config\") pod \"fec3385d-41ae-41d2-80d4-4af58a656162\" (UID: \"fec3385d-41ae-41d2-80d4-4af58a656162\") " Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.430654 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-dns-svc\") pod \"9ab34f3d-4a2d-42a5-8bd4-877893166667\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.430738 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vd4hh\" (UniqueName: \"kubernetes.io/projected/9ab34f3d-4a2d-42a5-8bd4-877893166667-kube-api-access-vd4hh\") pod \"9ab34f3d-4a2d-42a5-8bd4-877893166667\" (UID: \"9ab34f3d-4a2d-42a5-8bd4-877893166667\") " Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.437592 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ab34f3d-4a2d-42a5-8bd4-877893166667-kube-api-access-vd4hh" (OuterVolumeSpecName: "kube-api-access-vd4hh") pod "9ab34f3d-4a2d-42a5-8bd4-877893166667" (UID: "9ab34f3d-4a2d-42a5-8bd4-877893166667"). InnerVolumeSpecName "kube-api-access-vd4hh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.437999 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fec3385d-41ae-41d2-80d4-4af58a656162-kube-api-access-sjrm7" (OuterVolumeSpecName: "kube-api-access-sjrm7") pod "fec3385d-41ae-41d2-80d4-4af58a656162" (UID: "fec3385d-41ae-41d2-80d4-4af58a656162"). InnerVolumeSpecName "kube-api-access-sjrm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.456518 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-config" (OuterVolumeSpecName: "config") pod "9ab34f3d-4a2d-42a5-8bd4-877893166667" (UID: "9ab34f3d-4a2d-42a5-8bd4-877893166667"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.456618 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9ab34f3d-4a2d-42a5-8bd4-877893166667" (UID: "9ab34f3d-4a2d-42a5-8bd4-877893166667"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.461413 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fec3385d-41ae-41d2-80d4-4af58a656162-config" (OuterVolumeSpecName: "config") pod "fec3385d-41ae-41d2-80d4-4af58a656162" (UID: "fec3385d-41ae-41d2-80d4-4af58a656162"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.497437 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-697f967d49-t9lmx"] Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.532572 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vd4hh\" (UniqueName: \"kubernetes.io/projected/9ab34f3d-4a2d-42a5-8bd4-877893166667-kube-api-access-vd4hh\") on node \"crc\" DevicePath \"\"" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.532617 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.532635 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjrm7\" (UniqueName: \"kubernetes.io/projected/fec3385d-41ae-41d2-80d4-4af58a656162-kube-api-access-sjrm7\") on node \"crc\" DevicePath \"\"" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.532649 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fec3385d-41ae-41d2-80d4-4af58a656162-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.532663 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ab34f3d-4a2d-42a5-8bd4-877893166667-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.732308 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="7e9cf2d10460184da893979bc2c9b0439be07aa529d8fb6fe4706920d07685c7" exitCode=0 Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.732397 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"7e9cf2d10460184da893979bc2c9b0439be07aa529d8fb6fe4706920d07685c7"} Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.732436 4911 scope.go:117] "RemoveContainer" containerID="00a6b7c957b04d6217e597e04550f48b111aa26d7d1dc819e0c3ab94dfcdb9d6" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.736340 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" event={"ID":"94fcf722-4611-47e8-b20f-c57d35e8ecb7","Type":"ContainerStarted","Data":"15b9c767bd7729c2048e7502d9f7b9b186289ea5e0192b1f89156a7eb3a3dd8a"} Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.736480 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" podUID="94fcf722-4611-47e8-b20f-c57d35e8ecb7" containerName="dnsmasq-dns" containerID="cri-o://15b9c767bd7729c2048e7502d9f7b9b186289ea5e0192b1f89156a7eb3a3dd8a" gracePeriod=10 Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.736547 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.756444 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" event={"ID":"f904c35e-e8d4-40be-97b0-3c897429628b","Type":"ContainerStarted","Data":"d71ac0a6ac51eacac9267f7006737df2b9a0eebb8c69a1ca7f053c9ba77e5251"} Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.757140 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.770539 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-546489d6df-7hqx2" event={"ID":"fec3385d-41ae-41d2-80d4-4af58a656162","Type":"ContainerDied","Data":"c92e8950975863fd294aeca96ee0fcf0414b8957a6f10ff0e61a07ce2c8c2ace"} Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.770591 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-546489d6df-7hqx2" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.772209 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" podStartSLOduration=3.9320255360000003 podStartE2EDuration="14.772191364s" podCreationTimestamp="2025-06-06 09:28:40 +0000 UTC" firstStartedPulling="2025-06-06 09:28:41.439781395 +0000 UTC m=+932.715206938" lastFinishedPulling="2025-06-06 09:28:52.279947213 +0000 UTC m=+943.555372766" observedRunningTime="2025-06-06 09:28:54.770928151 +0000 UTC m=+946.046353694" watchObservedRunningTime="2025-06-06 09:28:54.772191364 +0000 UTC m=+946.047616897" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.775045 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"42b9b63b-5112-46d4-98d8-01e75d78b84f","Type":"ContainerStarted","Data":"ccd2f11d758ee253b4ba7bbbf0da75882eaa8e683c5c1e5e8885ccdf46b82819"} Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.778958 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7566756bf-8brnr" event={"ID":"9ab34f3d-4a2d-42a5-8bd4-877893166667","Type":"ContainerDied","Data":"22c80d36cea9db2616c5336a09878963441d33f81eaab3b207952c20b545ece2"} Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.779064 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7566756bf-8brnr" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.794155 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" podStartSLOduration=4.308906991 podStartE2EDuration="14.794136315s" podCreationTimestamp="2025-06-06 09:28:40 +0000 UTC" firstStartedPulling="2025-06-06 09:28:41.799176163 +0000 UTC m=+933.074601706" lastFinishedPulling="2025-06-06 09:28:52.284405487 +0000 UTC m=+943.559831030" observedRunningTime="2025-06-06 09:28:54.79197505 +0000 UTC m=+946.067400603" watchObservedRunningTime="2025-06-06 09:28:54.794136315 +0000 UTC m=+946.069561878" Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.866222 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-546489d6df-7hqx2"] Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.871029 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-546489d6df-7hqx2"] Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.896255 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7566756bf-8brnr"] Jun 06 09:28:54 crc kubenswrapper[4911]: I0606 09:28:54.901503 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7566756bf-8brnr"] Jun 06 09:28:55 crc kubenswrapper[4911]: W0606 09:28:55.047790 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd3241c2_4261_4525_b5dd_9a9711232726.slice/crio-f80a1ea74f5a6f3f23da7ba43c6eece425ae304c123d78e834469ddcbc13a651 WatchSource:0}: Error finding container f80a1ea74f5a6f3f23da7ba43c6eece425ae304c123d78e834469ddcbc13a651: Status 404 returned error can't find the container with id f80a1ea74f5a6f3f23da7ba43c6eece425ae304c123d78e834469ddcbc13a651 Jun 06 09:28:55 crc kubenswrapper[4911]: I0606 09:28:55.789443 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" event={"ID":"fd3241c2-4261-4525-b5dd-9a9711232726","Type":"ContainerStarted","Data":"f80a1ea74f5a6f3f23da7ba43c6eece425ae304c123d78e834469ddcbc13a651"} Jun 06 09:28:55 crc kubenswrapper[4911]: I0606 09:28:55.794142 4911 generic.go:334] "Generic (PLEG): container finished" podID="94fcf722-4611-47e8-b20f-c57d35e8ecb7" containerID="15b9c767bd7729c2048e7502d9f7b9b186289ea5e0192b1f89156a7eb3a3dd8a" exitCode=0 Jun 06 09:28:55 crc kubenswrapper[4911]: I0606 09:28:55.794213 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" event={"ID":"94fcf722-4611-47e8-b20f-c57d35e8ecb7","Type":"ContainerDied","Data":"15b9c767bd7729c2048e7502d9f7b9b186289ea5e0192b1f89156a7eb3a3dd8a"} Jun 06 09:28:55 crc kubenswrapper[4911]: I0606 09:28:55.962200 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ab34f3d-4a2d-42a5-8bd4-877893166667" path="/var/lib/kubelet/pods/9ab34f3d-4a2d-42a5-8bd4-877893166667/volumes" Jun 06 09:28:55 crc kubenswrapper[4911]: I0606 09:28:55.962845 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fec3385d-41ae-41d2-80d4-4af58a656162" path="/var/lib/kubelet/pods/fec3385d-41ae-41d2-80d4-4af58a656162/volumes" Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.598316 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.784900 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9887\" (UniqueName: \"kubernetes.io/projected/94fcf722-4611-47e8-b20f-c57d35e8ecb7-kube-api-access-c9887\") pod \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.785045 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-dns-svc\") pod \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.785077 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-config\") pod \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\" (UID: \"94fcf722-4611-47e8-b20f-c57d35e8ecb7\") " Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.791296 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94fcf722-4611-47e8-b20f-c57d35e8ecb7-kube-api-access-c9887" (OuterVolumeSpecName: "kube-api-access-c9887") pod "94fcf722-4611-47e8-b20f-c57d35e8ecb7" (UID: "94fcf722-4611-47e8-b20f-c57d35e8ecb7"). InnerVolumeSpecName "kube-api-access-c9887". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.820128 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" event={"ID":"94fcf722-4611-47e8-b20f-c57d35e8ecb7","Type":"ContainerDied","Data":"70704d9a240a011440909292ba2c9ac26650e0cd8196b60b51939e40b3ccb58d"} Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.820298 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5544c68b5-8q4nx" Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.827741 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "94fcf722-4611-47e8-b20f-c57d35e8ecb7" (UID: "94fcf722-4611-47e8-b20f-c57d35e8ecb7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.841942 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-config" (OuterVolumeSpecName: "config") pod "94fcf722-4611-47e8-b20f-c57d35e8ecb7" (UID: "94fcf722-4611-47e8-b20f-c57d35e8ecb7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.886890 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9887\" (UniqueName: \"kubernetes.io/projected/94fcf722-4611-47e8-b20f-c57d35e8ecb7-kube-api-access-c9887\") on node \"crc\" DevicePath \"\"" Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.886934 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:28:57 crc kubenswrapper[4911]: I0606 09:28:57.886948 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94fcf722-4611-47e8-b20f-c57d35e8ecb7-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:28:58 crc kubenswrapper[4911]: I0606 09:28:58.144303 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5544c68b5-8q4nx"] Jun 06 09:28:58 crc kubenswrapper[4911]: I0606 09:28:58.150433 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5544c68b5-8q4nx"] Jun 06 09:28:59 crc kubenswrapper[4911]: I0606 09:28:59.959515 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94fcf722-4611-47e8-b20f-c57d35e8ecb7" path="/var/lib/kubelet/pods/94fcf722-4611-47e8-b20f-c57d35e8ecb7/volumes" Jun 06 09:29:00 crc kubenswrapper[4911]: I0606 09:29:00.326252 4911 scope.go:117] "RemoveContainer" containerID="2a170211f91f2ee17c3d39ba4fbc7f86eb1aa6dea2e143fc74f63de0a1c1cbda" Jun 06 09:29:00 crc kubenswrapper[4911]: I0606 09:29:00.874438 4911 scope.go:117] "RemoveContainer" containerID="08bc6c67f75741e3ed803c013053566587e37397f18a2c4e2268f7da9241052a" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.009880 4911 scope.go:117] "RemoveContainer" containerID="15b9c767bd7729c2048e7502d9f7b9b186289ea5e0192b1f89156a7eb3a3dd8a" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.316358 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.380387 4911 scope.go:117] "RemoveContainer" containerID="5c4dc630afc6f6d00bfbfc7829615022d0b7f754323120d14ee00c29dda76ffd" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.840049 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-zxp2j"] Jun 06 09:29:01 crc kubenswrapper[4911]: E0606 09:29:01.841045 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94fcf722-4611-47e8-b20f-c57d35e8ecb7" containerName="init" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.841064 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="94fcf722-4611-47e8-b20f-c57d35e8ecb7" containerName="init" Jun 06 09:29:01 crc kubenswrapper[4911]: E0606 09:29:01.841075 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94fcf722-4611-47e8-b20f-c57d35e8ecb7" containerName="dnsmasq-dns" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.841081 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="94fcf722-4611-47e8-b20f-c57d35e8ecb7" containerName="dnsmasq-dns" Jun 06 09:29:01 crc kubenswrapper[4911]: E0606 09:29:01.841126 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ab34f3d-4a2d-42a5-8bd4-877893166667" containerName="init" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.841134 4911 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="9ab34f3d-4a2d-42a5-8bd4-877893166667" containerName="init" Jun 06 09:29:01 crc kubenswrapper[4911]: E0606 09:29:01.841145 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fec3385d-41ae-41d2-80d4-4af58a656162" containerName="init" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.841151 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fec3385d-41ae-41d2-80d4-4af58a656162" containerName="init" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.841358 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="fec3385d-41ae-41d2-80d4-4af58a656162" containerName="init" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.841372 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ab34f3d-4a2d-42a5-8bd4-877893166667" containerName="init" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.841389 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="94fcf722-4611-47e8-b20f-c57d35e8ecb7" containerName="dnsmasq-dns" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.841890 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-zxp2j" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.862661 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"6b9847c4a123626a7be96b480b8b31ed0796d77df359c9b4543cc2db6085b4ee"} Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.868753 4911 generic.go:334] "Generic (PLEG): container finished" podID="fd3241c2-4261-4525-b5dd-9a9711232726" containerID="9241dcbbac44552e5eba4288bed6765d0bd4b30f691bbef23256158a3751c02f" exitCode=0 Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.869965 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" event={"ID":"fd3241c2-4261-4525-b5dd-9a9711232726","Type":"ContainerDied","Data":"9241dcbbac44552e5eba4288bed6765d0bd4b30f691bbef23256158a3751c02f"} Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.952577 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51082cd1-c675-4f4d-af2d-19f69020eb52-host\") pod \"crc-debug-zxp2j\" (UID: \"51082cd1-c675-4f4d-af2d-19f69020eb52\") " pod="openstack/crc-debug-zxp2j" Jun 06 09:29:01 crc kubenswrapper[4911]: I0606 09:29:01.952890 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b8pp\" (UniqueName: \"kubernetes.io/projected/51082cd1-c675-4f4d-af2d-19f69020eb52-kube-api-access-7b8pp\") pod \"crc-debug-zxp2j\" (UID: \"51082cd1-c675-4f4d-af2d-19f69020eb52\") " pod="openstack/crc-debug-zxp2j" Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.054852 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b8pp\" (UniqueName: \"kubernetes.io/projected/51082cd1-c675-4f4d-af2d-19f69020eb52-kube-api-access-7b8pp\") pod \"crc-debug-zxp2j\" (UID: \"51082cd1-c675-4f4d-af2d-19f69020eb52\") " pod="openstack/crc-debug-zxp2j" Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.055564 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51082cd1-c675-4f4d-af2d-19f69020eb52-host\") pod \"crc-debug-zxp2j\" (UID: \"51082cd1-c675-4f4d-af2d-19f69020eb52\") " 
pod="openstack/crc-debug-zxp2j" Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.056586 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51082cd1-c675-4f4d-af2d-19f69020eb52-host\") pod \"crc-debug-zxp2j\" (UID: \"51082cd1-c675-4f4d-af2d-19f69020eb52\") " pod="openstack/crc-debug-zxp2j" Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.075272 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b8pp\" (UniqueName: \"kubernetes.io/projected/51082cd1-c675-4f4d-af2d-19f69020eb52-kube-api-access-7b8pp\") pod \"crc-debug-zxp2j\" (UID: \"51082cd1-c675-4f4d-af2d-19f69020eb52\") " pod="openstack/crc-debug-zxp2j" Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.244885 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-zxp2j" Jun 06 09:29:02 crc kubenswrapper[4911]: W0606 09:29:02.279027 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51082cd1_c675_4f4d_af2d_19f69020eb52.slice/crio-f5a95d045910d6ad72552025ffaed768a18c51d400cdffbeb945e417159a4599 WatchSource:0}: Error finding container f5a95d045910d6ad72552025ffaed768a18c51d400cdffbeb945e417159a4599: Status 404 returned error can't find the container with id f5a95d045910d6ad72552025ffaed768a18c51d400cdffbeb945e417159a4599 Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.884296 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fc9662e0-ac5c-46b2-b608-945afee3f990","Type":"ContainerStarted","Data":"782561d47f2894733753efbf8bc3cce8372441b98b969e2e892f7f18fce5a5c0"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.886511 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"42b9b63b-5112-46d4-98d8-01e75d78b84f","Type":"ContainerStarted","Data":"2b16996718fa13137c829e1a95a4d2befa2bbe324d5aeabec540ff976e999e5e"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.888112 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abf307d7-9aa9-4d2f-9943-e3a085568096","Type":"ContainerStarted","Data":"6f0518a6b3cc2f944aa4e2c7b254c3bf8b64829224f9a2eb7ac9e7286d602d50"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.890621 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ececc27b-f281-4e57-9c46-0cbe3a1ab2d7","Type":"ContainerStarted","Data":"9c9e46f1b412a66882c610ea7caac586f341d9f8941e43bf3630fa2d09545098"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.892077 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4dlv9" event={"ID":"5a8407a6-611d-477c-8530-9c1728797994","Type":"ContainerStarted","Data":"cbe9e51a6e35b3ae2c9170188d1985b38c695a79405102f0c1ca4d2731837012"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.892152 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-4dlv9" Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.896611 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20","Type":"ContainerStarted","Data":"e3848f587cad350443f3d3b38f08b2dbdda24ecf8229452f674285ce39a5b0e9"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.896755 4911 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/memcached-0" Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.898963 4911 generic.go:334] "Generic (PLEG): container finished" podID="372d84e8-5035-48ca-9ee9-676bf64886c9" containerID="1e0c6c57a33b36ba298ce5a8bfde02311a0b33e46621e6fa327a071fb8daa3de" exitCode=0 Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.899037 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jjkvl" event={"ID":"372d84e8-5035-48ca-9ee9-676bf64886c9","Type":"ContainerDied","Data":"1e0c6c57a33b36ba298ce5a8bfde02311a0b33e46621e6fa327a071fb8daa3de"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.906478 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b","Type":"ContainerStarted","Data":"c3a606933664264d59d48b53d9580c8a6c443cee2780a748e2278b4c801f812a"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.909538 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4","Type":"ContainerStarted","Data":"cb54fd730abcec5f423a30e6720800012bb616571840ae9eba50348833ebe2a7"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.913926 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-zxp2j" event={"ID":"51082cd1-c675-4f4d-af2d-19f69020eb52","Type":"ContainerStarted","Data":"52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.913973 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-zxp2j" event={"ID":"51082cd1-c675-4f4d-af2d-19f69020eb52","Type":"ContainerStarted","Data":"f5a95d045910d6ad72552025ffaed768a18c51d400cdffbeb945e417159a4599"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.916404 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3a014170-9bee-45a3-af73-3a5c0418de93","Type":"ContainerStarted","Data":"ead6721d9e3dd513e9b22bae105bbd48b465dff45673bd08c03d08293f46c111"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.916550 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.918775 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" event={"ID":"fd3241c2-4261-4525-b5dd-9a9711232726","Type":"ContainerStarted","Data":"9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d"} Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.918886 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.977587 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-4dlv9" podStartSLOduration=6.081560871 podStartE2EDuration="13.977565693s" podCreationTimestamp="2025-06-06 09:28:49 +0000 UTC" firstStartedPulling="2025-06-06 09:28:52.962756798 +0000 UTC m=+944.238182341" lastFinishedPulling="2025-06-06 09:29:00.85876162 +0000 UTC m=+952.134187163" observedRunningTime="2025-06-06 09:29:02.970462552 +0000 UTC m=+954.245888125" watchObservedRunningTime="2025-06-06 09:29:02.977565693 +0000 UTC m=+954.252991236" Jun 06 09:29:02 crc kubenswrapper[4911]: I0606 09:29:02.990361 4911 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/memcached-0" podStartSLOduration=10.09560375 podStartE2EDuration="17.99033679s" podCreationTimestamp="2025-06-06 09:28:45 +0000 UTC" firstStartedPulling="2025-06-06 09:28:52.821828021 +0000 UTC m=+944.097253564" lastFinishedPulling="2025-06-06 09:29:00.716561071 +0000 UTC m=+951.991986604" observedRunningTime="2025-06-06 09:29:02.989083108 +0000 UTC m=+954.264508651" watchObservedRunningTime="2025-06-06 09:29:02.99033679 +0000 UTC m=+954.265762343" Jun 06 09:29:03 crc kubenswrapper[4911]: I0606 09:29:03.008226 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=4.965793549 podStartE2EDuration="13.008202027s" podCreationTimestamp="2025-06-06 09:28:50 +0000 UTC" firstStartedPulling="2025-06-06 09:28:53.031025155 +0000 UTC m=+944.306450698" lastFinishedPulling="2025-06-06 09:29:01.073433593 +0000 UTC m=+952.348859176" observedRunningTime="2025-06-06 09:29:03.005438797 +0000 UTC m=+954.280864350" watchObservedRunningTime="2025-06-06 09:29:03.008202027 +0000 UTC m=+954.283627570" Jun 06 09:29:03 crc kubenswrapper[4911]: I0606 09:29:03.027850 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.245345274 podStartE2EDuration="11.027814489s" podCreationTimestamp="2025-06-06 09:28:52 +0000 UTC" firstStartedPulling="2025-06-06 09:28:54.291508722 +0000 UTC m=+945.566934275" lastFinishedPulling="2025-06-06 09:29:01.073977907 +0000 UTC m=+952.349403490" observedRunningTime="2025-06-06 09:29:03.026368022 +0000 UTC m=+954.301793575" watchObservedRunningTime="2025-06-06 09:29:03.027814489 +0000 UTC m=+954.303240042" Jun 06 09:29:03 crc kubenswrapper[4911]: I0606 09:29:03.048780 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=9.499469985 podStartE2EDuration="18.048761435s" podCreationTimestamp="2025-06-06 09:28:45 +0000 UTC" firstStartedPulling="2025-06-06 09:28:52.967687944 +0000 UTC m=+944.243113487" lastFinishedPulling="2025-06-06 09:29:01.516979394 +0000 UTC m=+952.792404937" observedRunningTime="2025-06-06 09:29:03.044114626 +0000 UTC m=+954.319540169" watchObservedRunningTime="2025-06-06 09:29:03.048761435 +0000 UTC m=+954.324186978" Jun 06 09:29:03 crc kubenswrapper[4911]: I0606 09:29:03.110421 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-zxp2j" podStartSLOduration=2.110404263 podStartE2EDuration="2.110404263s" podCreationTimestamp="2025-06-06 09:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:29:03.084730076 +0000 UTC m=+954.360155629" watchObservedRunningTime="2025-06-06 09:29:03.110404263 +0000 UTC m=+954.385829806" Jun 06 09:29:03 crc kubenswrapper[4911]: I0606 09:29:03.124904 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" podStartSLOduration=10.124881344 podStartE2EDuration="10.124881344s" podCreationTimestamp="2025-06-06 09:28:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:29:03.109712285 +0000 UTC m=+954.385137828" watchObservedRunningTime="2025-06-06 09:29:03.124881344 +0000 UTC m=+954.400306887" Jun 06 09:29:03 crc kubenswrapper[4911]: I0606 09:29:03.649010 4911 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Jun 06 09:29:03 crc kubenswrapper[4911]: I0606 09:29:03.931735 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jjkvl" event={"ID":"372d84e8-5035-48ca-9ee9-676bf64886c9","Type":"ContainerStarted","Data":"189d6ae7dd093ffa73af789547da9e465dc91565f3a587da3a03c3a0bd210c99"} Jun 06 09:29:03 crc kubenswrapper[4911]: I0606 09:29:03.931800 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-jjkvl" event={"ID":"372d84e8-5035-48ca-9ee9-676bf64886c9","Type":"ContainerStarted","Data":"52caf253aa491dae256c1f09046be8e6f5985090d6e4df2ba3537dff0ea399f1"} Jun 06 09:29:03 crc kubenswrapper[4911]: I0606 09:29:03.958173 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-jjkvl" podStartSLOduration=7.191553767 podStartE2EDuration="14.958154058s" podCreationTimestamp="2025-06-06 09:28:49 +0000 UTC" firstStartedPulling="2025-06-06 09:28:53.092140279 +0000 UTC m=+944.367565822" lastFinishedPulling="2025-06-06 09:29:00.85874057 +0000 UTC m=+952.134166113" observedRunningTime="2025-06-06 09:29:03.95081441 +0000 UTC m=+955.226239953" watchObservedRunningTime="2025-06-06 09:29:03.958154058 +0000 UTC m=+955.233579601" Jun 06 09:29:04 crc kubenswrapper[4911]: I0606 09:29:04.056889 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Jun 06 09:29:04 crc kubenswrapper[4911]: I0606 09:29:04.095872 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jun 06 09:29:04 crc kubenswrapper[4911]: I0606 09:29:04.642033 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:29:04 crc kubenswrapper[4911]: I0606 09:29:04.642145 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:29:04 crc kubenswrapper[4911]: I0606 09:29:04.943647 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jun 06 09:29:05 crc kubenswrapper[4911]: I0606 09:29:05.649684 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Jun 06 09:29:05 crc kubenswrapper[4911]: I0606 09:29:05.689418 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Jun 06 09:29:05 crc kubenswrapper[4911]: I0606 09:29:05.955068 4911 generic.go:334] "Generic (PLEG): container finished" podID="4a2cb894-6a4a-4a24-aedb-b3dabe082a4b" containerID="c3a606933664264d59d48b53d9580c8a6c443cee2780a748e2278b4c801f812a" exitCode=0 Jun 06 09:29:05 crc kubenswrapper[4911]: I0606 09:29:05.957201 4911 generic.go:334] "Generic (PLEG): container finished" podID="fc9662e0-ac5c-46b2-b608-945afee3f990" containerID="782561d47f2894733753efbf8bc3cce8372441b98b969e2e892f7f18fce5a5c0" exitCode=0 Jun 06 09:29:05 crc kubenswrapper[4911]: I0606 09:29:05.958170 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b","Type":"ContainerDied","Data":"c3a606933664264d59d48b53d9580c8a6c443cee2780a748e2278b4c801f812a"} Jun 06 09:29:05 crc kubenswrapper[4911]: I0606 09:29:05.958225 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"fc9662e0-ac5c-46b2-b608-945afee3f990","Type":"ContainerDied","Data":"782561d47f2894733753efbf8bc3cce8372441b98b969e2e892f7f18fce5a5c0"} Jun 06 09:29:06 crc kubenswrapper[4911]: I0606 09:29:06.970722 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a2cb894-6a4a-4a24-aedb-b3dabe082a4b","Type":"ContainerStarted","Data":"a3b3edaebc1b03a77b3ba98129ef4db2aa0aabe1698c949eddea54a20a3ec161"} Jun 06 09:29:06 crc kubenswrapper[4911]: I0606 09:29:06.973888 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fc9662e0-ac5c-46b2-b608-945afee3f990","Type":"ContainerStarted","Data":"9b02b915d9abb136db060ec1dff9879afa494fc33f94c0bdf172caa4d1300a61"} Jun 06 09:29:07 crc kubenswrapper[4911]: I0606 09:29:07.001108 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=15.990447185 podStartE2EDuration="24.00106215s" podCreationTimestamp="2025-06-06 09:28:43 +0000 UTC" firstStartedPulling="2025-06-06 09:28:53.062849109 +0000 UTC m=+944.338274642" lastFinishedPulling="2025-06-06 09:29:01.073464064 +0000 UTC m=+952.348889607" observedRunningTime="2025-06-06 09:29:06.995847006 +0000 UTC m=+958.271272549" watchObservedRunningTime="2025-06-06 09:29:07.00106215 +0000 UTC m=+958.276487693" Jun 06 09:29:07 crc kubenswrapper[4911]: I0606 09:29:07.017685 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=15.804609937 podStartE2EDuration="24.017661154s" podCreationTimestamp="2025-06-06 09:28:43 +0000 UTC" firstStartedPulling="2025-06-06 09:28:52.582439114 +0000 UTC m=+943.857864657" lastFinishedPulling="2025-06-06 09:29:00.795490321 +0000 UTC m=+952.070915874" observedRunningTime="2025-06-06 09:29:07.016067824 +0000 UTC m=+958.291493377" watchObservedRunningTime="2025-06-06 09:29:07.017661154 +0000 UTC m=+958.293086697" Jun 06 09:29:07 crc kubenswrapper[4911]: I0606 09:29:07.100984 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jun 06 09:29:08 crc kubenswrapper[4911]: I0606 09:29:08.688010 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Jun 06 09:29:08 crc kubenswrapper[4911]: I0606 09:29:08.798258 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:29:08 crc kubenswrapper[4911]: I0606 09:29:08.864836 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d5db84f4f-667cv"] Jun 06 09:29:08 crc kubenswrapper[4911]: I0606 09:29:08.865079 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" podUID="f904c35e-e8d4-40be-97b0-3c897429628b" containerName="dnsmasq-dns" containerID="cri-o://d71ac0a6ac51eacac9267f7006737df2b9a0eebb8c69a1ca7f053c9ba77e5251" gracePeriod=10 Jun 06 09:29:08 crc kubenswrapper[4911]: I0606 09:29:08.931680 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-5f844cb4b9-bl729"] Jun 06 09:29:08 crc kubenswrapper[4911]: I0606 09:29:08.932658 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:08 crc kubenswrapper[4911]: I0606 09:29:08.942218 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Jun 06 09:29:08 crc kubenswrapper[4911]: I0606 09:29:08.951274 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-nmr6x" Jun 06 09:29:08 crc kubenswrapper[4911]: I0606 09:29:08.959736 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-5f844cb4b9-bl729"] Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.006697 4911 generic.go:334] "Generic (PLEG): container finished" podID="f904c35e-e8d4-40be-97b0-3c897429628b" containerID="d71ac0a6ac51eacac9267f7006737df2b9a0eebb8c69a1ca7f053c9ba77e5251" exitCode=0 Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.006757 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" event={"ID":"f904c35e-e8d4-40be-97b0-3c897429628b","Type":"ContainerDied","Data":"d71ac0a6ac51eacac9267f7006737df2b9a0eebb8c69a1ca7f053c9ba77e5251"} Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.007187 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9-ovn-northd-tls-certs\") pod \"ovn-northd-5f844cb4b9-bl729\" (UID: \"d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9\") " pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.007366 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9-combined-ca-bundle\") pod \"ovn-northd-5f844cb4b9-bl729\" (UID: \"d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9\") " pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.007462 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glh4k\" (UniqueName: \"kubernetes.io/projected/d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9-kube-api-access-glh4k\") pod \"ovn-northd-5f844cb4b9-bl729\" (UID: \"d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9\") " pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.020870 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56bfd68fbc-zv9jl"] Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.022809 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.025257 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.031979 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56bfd68fbc-zv9jl"] Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.109032 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-dns-svc\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.109415 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-sb\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.109467 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9-combined-ca-bundle\") pod \"ovn-northd-5f844cb4b9-bl729\" (UID: \"d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9\") " pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.109514 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-nb\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.109539 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glh4k\" (UniqueName: \"kubernetes.io/projected/d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9-kube-api-access-glh4k\") pod \"ovn-northd-5f844cb4b9-bl729\" (UID: \"d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9\") " pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.109765 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-config\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.109796 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9-ovn-northd-tls-certs\") pod \"ovn-northd-5f844cb4b9-bl729\" (UID: \"d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9\") " pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.109821 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gtzf\" (UniqueName: \"kubernetes.io/projected/5230e066-1713-410d-a491-28e7f6520e07-kube-api-access-8gtzf\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: 
\"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.117814 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9-ovn-northd-tls-certs\") pod \"ovn-northd-5f844cb4b9-bl729\" (UID: \"d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9\") " pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.117883 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9-combined-ca-bundle\") pod \"ovn-northd-5f844cb4b9-bl729\" (UID: \"d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9\") " pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.140710 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glh4k\" (UniqueName: \"kubernetes.io/projected/d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9-kube-api-access-glh4k\") pod \"ovn-northd-5f844cb4b9-bl729\" (UID: \"d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9\") " pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.211642 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-sb\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.211751 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-nb\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.211792 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-config\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.211829 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gtzf\" (UniqueName: \"kubernetes.io/projected/5230e066-1713-410d-a491-28e7f6520e07-kube-api-access-8gtzf\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.211872 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-dns-svc\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.212927 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-sb\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " 
pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.212977 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-dns-svc\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.213681 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-config\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.213767 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-nb\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.235478 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gtzf\" (UniqueName: \"kubernetes.io/projected/5230e066-1713-410d-a491-28e7f6520e07-kube-api-access-8gtzf\") pod \"dnsmasq-dns-56bfd68fbc-zv9jl\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.320764 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.344434 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.407571 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.414459 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-config\") pod \"f904c35e-e8d4-40be-97b0-3c897429628b\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.414538 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-dns-svc\") pod \"f904c35e-e8d4-40be-97b0-3c897429628b\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.414613 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc8dg\" (UniqueName: \"kubernetes.io/projected/f904c35e-e8d4-40be-97b0-3c897429628b-kube-api-access-jc8dg\") pod \"f904c35e-e8d4-40be-97b0-3c897429628b\" (UID: \"f904c35e-e8d4-40be-97b0-3c897429628b\") " Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.432907 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f904c35e-e8d4-40be-97b0-3c897429628b-kube-api-access-jc8dg" (OuterVolumeSpecName: "kube-api-access-jc8dg") pod "f904c35e-e8d4-40be-97b0-3c897429628b" (UID: "f904c35e-e8d4-40be-97b0-3c897429628b"). 
InnerVolumeSpecName "kube-api-access-jc8dg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.466286 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f904c35e-e8d4-40be-97b0-3c897429628b" (UID: "f904c35e-e8d4-40be-97b0-3c897429628b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.478610 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-config" (OuterVolumeSpecName: "config") pod "f904c35e-e8d4-40be-97b0-3c897429628b" (UID: "f904c35e-e8d4-40be-97b0-3c897429628b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.516246 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc8dg\" (UniqueName: \"kubernetes.io/projected/f904c35e-e8d4-40be-97b0-3c897429628b-kube-api-access-jc8dg\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.516285 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.516298 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f904c35e-e8d4-40be-97b0-3c897429628b-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.802296 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-5f844cb4b9-bl729"] Jun 06 09:29:09 crc kubenswrapper[4911]: I0606 09:29:09.867196 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56bfd68fbc-zv9jl"] Jun 06 09:29:09 crc kubenswrapper[4911]: W0606 09:29:09.876669 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5230e066_1713_410d_a491_28e7f6520e07.slice/crio-b10b4996471f2280a14858a45d718f4bd0184d5627cb99e9614e776b32a8801b WatchSource:0}: Error finding container b10b4996471f2280a14858a45d718f4bd0184d5627cb99e9614e776b32a8801b: Status 404 returned error can't find the container with id b10b4996471f2280a14858a45d718f4bd0184d5627cb99e9614e776b32a8801b Jun 06 09:29:10 crc kubenswrapper[4911]: I0606 09:29:10.015403 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-5f844cb4b9-bl729" event={"ID":"d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9","Type":"ContainerStarted","Data":"a4e74c82c3a0d10a351add55726b07c5e010e54ac8ccf3062bcea6911195d7c9"} Jun 06 09:29:10 crc kubenswrapper[4911]: I0606 09:29:10.016595 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" event={"ID":"5230e066-1713-410d-a491-28e7f6520e07","Type":"ContainerStarted","Data":"b10b4996471f2280a14858a45d718f4bd0184d5627cb99e9614e776b32a8801b"} Jun 06 09:29:10 crc kubenswrapper[4911]: I0606 09:29:10.019212 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" event={"ID":"f904c35e-e8d4-40be-97b0-3c897429628b","Type":"ContainerDied","Data":"d9f3d120cb9b2d1aea5964703d8cba56d80160b8a3ff2e1cdeacd25baa3e329f"} Jun 06 09:29:10 
crc kubenswrapper[4911]: I0606 09:29:10.019267 4911 scope.go:117] "RemoveContainer" containerID="d71ac0a6ac51eacac9267f7006737df2b9a0eebb8c69a1ca7f053c9ba77e5251" Jun 06 09:29:10 crc kubenswrapper[4911]: I0606 09:29:10.019331 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d5db84f4f-667cv" Jun 06 09:29:10 crc kubenswrapper[4911]: I0606 09:29:10.052283 4911 scope.go:117] "RemoveContainer" containerID="ee430fb75688c01f8ec103d9ce71c5e541f803ed5c46fa7e651c71f75edb7755" Jun 06 09:29:10 crc kubenswrapper[4911]: I0606 09:29:10.063247 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d5db84f4f-667cv"] Jun 06 09:29:10 crc kubenswrapper[4911]: I0606 09:29:10.070494 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d5db84f4f-667cv"] Jun 06 09:29:10 crc kubenswrapper[4911]: I0606 09:29:10.536426 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jun 06 09:29:11 crc kubenswrapper[4911]: I0606 09:29:11.042877 4911 generic.go:334] "Generic (PLEG): container finished" podID="5230e066-1713-410d-a491-28e7f6520e07" containerID="edc0ba7d7ea0e2cca4c2670b972d39ece4a571403e00eaa4149e34b60663c11b" exitCode=0 Jun 06 09:29:11 crc kubenswrapper[4911]: I0606 09:29:11.043263 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" event={"ID":"5230e066-1713-410d-a491-28e7f6520e07","Type":"ContainerDied","Data":"edc0ba7d7ea0e2cca4c2670b972d39ece4a571403e00eaa4149e34b60663c11b"} Jun 06 09:29:11 crc kubenswrapper[4911]: I0606 09:29:11.958049 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f904c35e-e8d4-40be-97b0-3c897429628b" path="/var/lib/kubelet/pods/f904c35e-e8d4-40be-97b0-3c897429628b/volumes" Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.056021 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-5f844cb4b9-bl729" event={"ID":"d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9","Type":"ContainerStarted","Data":"f65b4350dafff23bfba604c394bdafc2dbe7fe3de5fdb5764ae7cf9b856413a5"} Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.056223 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.062921 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" event={"ID":"5230e066-1713-410d-a491-28e7f6520e07","Type":"ContainerStarted","Data":"6244a8bb86371225cb90aebbccf64b08af50514343c9548f8e0007e8acee50b6"} Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.063445 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.076831 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-5f844cb4b9-bl729" podStartSLOduration=2.6680095919999998 podStartE2EDuration="4.076804556s" podCreationTimestamp="2025-06-06 09:29:08 +0000 UTC" firstStartedPulling="2025-06-06 09:29:09.811618196 +0000 UTC m=+961.087043739" lastFinishedPulling="2025-06-06 09:29:11.22041316 +0000 UTC m=+962.495838703" observedRunningTime="2025-06-06 09:29:12.073336457 +0000 UTC m=+963.348762020" watchObservedRunningTime="2025-06-06 09:29:12.076804556 +0000 UTC m=+963.352230099" Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.099130 4911 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" podStartSLOduration=4.099077186 podStartE2EDuration="4.099077186s" podCreationTimestamp="2025-06-06 09:29:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:29:12.095845573 +0000 UTC m=+963.371271146" watchObservedRunningTime="2025-06-06 09:29:12.099077186 +0000 UTC m=+963.374502729" Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.636020 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-zxp2j"] Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.636300 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-zxp2j" podUID="51082cd1-c675-4f4d-af2d-19f69020eb52" containerName="container-00" containerID="cri-o://52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7" gracePeriod=2 Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.645513 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-zxp2j"] Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.724337 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-zxp2j" Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.875352 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51082cd1-c675-4f4d-af2d-19f69020eb52-host\") pod \"51082cd1-c675-4f4d-af2d-19f69020eb52\" (UID: \"51082cd1-c675-4f4d-af2d-19f69020eb52\") " Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.875501 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b8pp\" (UniqueName: \"kubernetes.io/projected/51082cd1-c675-4f4d-af2d-19f69020eb52-kube-api-access-7b8pp\") pod \"51082cd1-c675-4f4d-af2d-19f69020eb52\" (UID: \"51082cd1-c675-4f4d-af2d-19f69020eb52\") " Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.875500 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/51082cd1-c675-4f4d-af2d-19f69020eb52-host" (OuterVolumeSpecName: "host") pod "51082cd1-c675-4f4d-af2d-19f69020eb52" (UID: "51082cd1-c675-4f4d-af2d-19f69020eb52"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.876078 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51082cd1-c675-4f4d-af2d-19f69020eb52-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.881455 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51082cd1-c675-4f4d-af2d-19f69020eb52-kube-api-access-7b8pp" (OuterVolumeSpecName: "kube-api-access-7b8pp") pod "51082cd1-c675-4f4d-af2d-19f69020eb52" (UID: "51082cd1-c675-4f4d-af2d-19f69020eb52"). InnerVolumeSpecName "kube-api-access-7b8pp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:12 crc kubenswrapper[4911]: I0606 09:29:12.977515 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b8pp\" (UniqueName: \"kubernetes.io/projected/51082cd1-c675-4f4d-af2d-19f69020eb52-kube-api-access-7b8pp\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:13 crc kubenswrapper[4911]: I0606 09:29:13.069674 4911 generic.go:334] "Generic (PLEG): container finished" podID="51082cd1-c675-4f4d-af2d-19f69020eb52" containerID="52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7" exitCode=0 Jun 06 09:29:13 crc kubenswrapper[4911]: I0606 09:29:13.069753 4911 scope.go:117] "RemoveContainer" containerID="52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7" Jun 06 09:29:13 crc kubenswrapper[4911]: I0606 09:29:13.070714 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-zxp2j" Jun 06 09:29:13 crc kubenswrapper[4911]: I0606 09:29:13.092713 4911 scope.go:117] "RemoveContainer" containerID="52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7" Jun 06 09:29:13 crc kubenswrapper[4911]: E0606 09:29:13.093922 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7\": container with ID starting with 52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7 not found: ID does not exist" containerID="52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7" Jun 06 09:29:13 crc kubenswrapper[4911]: I0606 09:29:13.093988 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7"} err="failed to get container status \"52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7\": rpc error: code = NotFound desc = could not find container \"52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7\": container with ID starting with 52d0be6f6f96480a845c7f0bc515c1b45d9d98ef6fca5a0caf7d3979a65ec3a7 not found: ID does not exist" Jun 06 09:29:13 crc kubenswrapper[4911]: I0606 09:29:13.960017 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51082cd1-c675-4f4d-af2d-19f69020eb52" path="/var/lib/kubelet/pods/51082cd1-c675-4f4d-af2d-19f69020eb52/volumes" Jun 06 09:29:15 crc kubenswrapper[4911]: I0606 09:29:15.211193 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jun 06 09:29:15 crc kubenswrapper[4911]: I0606 09:29:15.211572 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jun 06 09:29:15 crc kubenswrapper[4911]: I0606 09:29:15.258875 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jun 06 09:29:15 crc kubenswrapper[4911]: I0606 09:29:15.388397 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jun 06 09:29:15 crc kubenswrapper[4911]: I0606 09:29:15.388469 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jun 06 09:29:15 crc kubenswrapper[4911]: I0606 09:29:15.430915 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.118843 4911 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bfd68fbc-zv9jl"] Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.119116 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" podUID="5230e066-1713-410d-a491-28e7f6520e07" containerName="dnsmasq-dns" containerID="cri-o://6244a8bb86371225cb90aebbccf64b08af50514343c9548f8e0007e8acee50b6" gracePeriod=10 Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.120487 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.165267 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-844f6c47c-tc4ph"] Jun 06 09:29:16 crc kubenswrapper[4911]: E0606 09:29:16.165659 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51082cd1-c675-4f4d-af2d-19f69020eb52" containerName="container-00" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.165677 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="51082cd1-c675-4f4d-af2d-19f69020eb52" containerName="container-00" Jun 06 09:29:16 crc kubenswrapper[4911]: E0606 09:29:16.165705 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f904c35e-e8d4-40be-97b0-3c897429628b" containerName="dnsmasq-dns" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.165711 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f904c35e-e8d4-40be-97b0-3c897429628b" containerName="dnsmasq-dns" Jun 06 09:29:16 crc kubenswrapper[4911]: E0606 09:29:16.165722 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f904c35e-e8d4-40be-97b0-3c897429628b" containerName="init" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.165730 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f904c35e-e8d4-40be-97b0-3c897429628b" containerName="init" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.166049 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="51082cd1-c675-4f4d-af2d-19f69020eb52" containerName="container-00" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.166075 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f904c35e-e8d4-40be-97b0-3c897429628b" containerName="dnsmasq-dns" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.166921 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.186676 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-844f6c47c-tc4ph"] Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.200403 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.206486 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.218647 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.334814 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffg8c\" (UniqueName: \"kubernetes.io/projected/b62868b9-909f-4974-a624-796bfb6a2372-kube-api-access-ffg8c\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.335246 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-sb\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.335310 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-config\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.335366 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-nb\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.335405 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-dns-svc\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.436951 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-sb\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.437018 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-config\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc 
kubenswrapper[4911]: I0606 09:29:16.437062 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-nb\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.437086 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-dns-svc\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.438124 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-config\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.438166 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-dns-svc\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.438192 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-nb\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.438208 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffg8c\" (UniqueName: \"kubernetes.io/projected/b62868b9-909f-4974-a624-796bfb6a2372-kube-api-access-ffg8c\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.438545 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-sb\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.462224 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffg8c\" (UniqueName: \"kubernetes.io/projected/b62868b9-909f-4974-a624-796bfb6a2372-kube-api-access-ffg8c\") pod \"dnsmasq-dns-844f6c47c-tc4ph\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.485857 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:16 crc kubenswrapper[4911]: I0606 09:29:16.925504 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-844f6c47c-tc4ph"] Jun 06 09:29:16 crc kubenswrapper[4911]: W0606 09:29:16.926325 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb62868b9_909f_4974_a624_796bfb6a2372.slice/crio-a99255ccfc0269f3b9abfb586ceffb353a9a70a6c9536a0974bd7b439041a43f WatchSource:0}: Error finding container a99255ccfc0269f3b9abfb586ceffb353a9a70a6c9536a0974bd7b439041a43f: Status 404 returned error can't find the container with id a99255ccfc0269f3b9abfb586ceffb353a9a70a6c9536a0974bd7b439041a43f Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.118064 4911 generic.go:334] "Generic (PLEG): container finished" podID="5230e066-1713-410d-a491-28e7f6520e07" containerID="6244a8bb86371225cb90aebbccf64b08af50514343c9548f8e0007e8acee50b6" exitCode=0 Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.118188 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" event={"ID":"5230e066-1713-410d-a491-28e7f6520e07","Type":"ContainerDied","Data":"6244a8bb86371225cb90aebbccf64b08af50514343c9548f8e0007e8acee50b6"} Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.119362 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" event={"ID":"b62868b9-909f-4974-a624-796bfb6a2372","Type":"ContainerStarted","Data":"a99255ccfc0269f3b9abfb586ceffb353a9a70a6c9536a0974bd7b439041a43f"} Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.398382 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.426983 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.429985 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.430191 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.430785 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-qnm8d" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.430946 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.441441 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.560432 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.560506 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.560541 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-lock\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.560580 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjvrm\" (UniqueName: \"kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-kube-api-access-vjvrm\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.560604 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-cache\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.662043 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.662141 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.662174 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-lock\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.662208 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjvrm\" (UniqueName: \"kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-kube-api-access-vjvrm\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.662229 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-cache\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.662735 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-cache\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.663054 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: E0606 09:29:17.664550 4911 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.664593 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-lock\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: E0606 09:29:17.664629 4911 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jun 06 09:29:17 crc kubenswrapper[4911]: E0606 09:29:17.664692 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift podName:bbc248fe-b133-4e7e-aad5-c29a3c215e6b nodeName:}" failed. No retries permitted until 2025-06-06 09:29:18.16467404 +0000 UTC m=+969.440099573 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift") pod "swift-storage-0" (UID: "bbc248fe-b133-4e7e-aad5-c29a3c215e6b") : configmap "swift-ring-files" not found Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.685545 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjvrm\" (UniqueName: \"kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-kube-api-access-vjvrm\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:17 crc kubenswrapper[4911]: I0606 09:29:17.686610 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:18 crc kubenswrapper[4911]: I0606 09:29:18.170599 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:18 crc kubenswrapper[4911]: E0606 09:29:18.170810 4911 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jun 06 09:29:18 crc kubenswrapper[4911]: E0606 09:29:18.170875 4911 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jun 06 09:29:18 crc kubenswrapper[4911]: E0606 09:29:18.170963 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift podName:bbc248fe-b133-4e7e-aad5-c29a3c215e6b nodeName:}" failed. No retries permitted until 2025-06-06 09:29:19.170925256 +0000 UTC m=+970.446350799 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift") pod "swift-storage-0" (UID: "bbc248fe-b133-4e7e-aad5-c29a3c215e6b") : configmap "swift-ring-files" not found Jun 06 09:29:19 crc kubenswrapper[4911]: I0606 09:29:19.185706 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:19 crc kubenswrapper[4911]: E0606 09:29:19.185909 4911 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jun 06 09:29:19 crc kubenswrapper[4911]: E0606 09:29:19.186182 4911 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jun 06 09:29:19 crc kubenswrapper[4911]: E0606 09:29:19.186238 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift podName:bbc248fe-b133-4e7e-aad5-c29a3c215e6b nodeName:}" failed. No retries permitted until 2025-06-06 09:29:21.186223598 +0000 UTC m=+972.461649141 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift") pod "swift-storage-0" (UID: "bbc248fe-b133-4e7e-aad5-c29a3c215e6b") : configmap "swift-ring-files" not found Jun 06 09:29:19 crc kubenswrapper[4911]: I0606 09:29:19.345771 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" podUID="5230e066-1713-410d-a491-28e7f6520e07" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.109:5353: connect: connection refused" Jun 06 09:29:19 crc kubenswrapper[4911]: I0606 09:29:19.355463 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-5f844cb4b9-bl729" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.118406 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.147859 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" event={"ID":"5230e066-1713-410d-a491-28e7f6520e07","Type":"ContainerDied","Data":"b10b4996471f2280a14858a45d718f4bd0184d5627cb99e9614e776b32a8801b"} Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.147913 4911 scope.go:117] "RemoveContainer" containerID="6244a8bb86371225cb90aebbccf64b08af50514343c9548f8e0007e8acee50b6" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.147926 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bfd68fbc-zv9jl" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.150758 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" event={"ID":"b62868b9-909f-4974-a624-796bfb6a2372","Type":"ContainerStarted","Data":"836f143f536c17110752a9ab56b614d830499e6a733eacfabdbdda0480e59c50"} Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.168284 4911 scope.go:117] "RemoveContainer" containerID="edc0ba7d7ea0e2cca4c2670b972d39ece4a571403e00eaa4149e34b60663c11b" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.202525 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-sb\") pod \"5230e066-1713-410d-a491-28e7f6520e07\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.202628 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-nb\") pod \"5230e066-1713-410d-a491-28e7f6520e07\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.202846 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-config\") pod \"5230e066-1713-410d-a491-28e7f6520e07\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.202973 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-dns-svc\") pod \"5230e066-1713-410d-a491-28e7f6520e07\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 
09:29:20.203037 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gtzf\" (UniqueName: \"kubernetes.io/projected/5230e066-1713-410d-a491-28e7f6520e07-kube-api-access-8gtzf\") pod \"5230e066-1713-410d-a491-28e7f6520e07\" (UID: \"5230e066-1713-410d-a491-28e7f6520e07\") " Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.208679 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5230e066-1713-410d-a491-28e7f6520e07-kube-api-access-8gtzf" (OuterVolumeSpecName: "kube-api-access-8gtzf") pod "5230e066-1713-410d-a491-28e7f6520e07" (UID: "5230e066-1713-410d-a491-28e7f6520e07"). InnerVolumeSpecName "kube-api-access-8gtzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.240081 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5230e066-1713-410d-a491-28e7f6520e07" (UID: "5230e066-1713-410d-a491-28e7f6520e07"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.240440 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5230e066-1713-410d-a491-28e7f6520e07" (UID: "5230e066-1713-410d-a491-28e7f6520e07"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.243440 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5230e066-1713-410d-a491-28e7f6520e07" (UID: "5230e066-1713-410d-a491-28e7f6520e07"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.244117 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-config" (OuterVolumeSpecName: "config") pod "5230e066-1713-410d-a491-28e7f6520e07" (UID: "5230e066-1713-410d-a491-28e7f6520e07"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.306187 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.306229 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.306240 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gtzf\" (UniqueName: \"kubernetes.io/projected/5230e066-1713-410d-a491-28e7f6520e07-kube-api-access-8gtzf\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.306253 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.306263 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5230e066-1713-410d-a491-28e7f6520e07-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.481327 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bfd68fbc-zv9jl"] Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.487438 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56bfd68fbc-zv9jl"] Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.794218 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-s2nfn"] Jun 06 09:29:20 crc kubenswrapper[4911]: E0606 09:29:20.794839 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5230e066-1713-410d-a491-28e7f6520e07" containerName="dnsmasq-dns" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.794864 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5230e066-1713-410d-a491-28e7f6520e07" containerName="dnsmasq-dns" Jun 06 09:29:20 crc kubenswrapper[4911]: E0606 09:29:20.794885 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5230e066-1713-410d-a491-28e7f6520e07" containerName="init" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.794891 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5230e066-1713-410d-a491-28e7f6520e07" containerName="init" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.795055 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5230e066-1713-410d-a491-28e7f6520e07" containerName="dnsmasq-dns" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.795762 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-s2nfn" Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.815061 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-s2nfn"] Jun 06 09:29:20 crc kubenswrapper[4911]: I0606 09:29:20.934799 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcgd8\" (UniqueName: \"kubernetes.io/projected/36ecd1b9-f118-413b-9e59-0c6d2f389c44-kube-api-access-dcgd8\") pod \"glance-db-create-s2nfn\" (UID: \"36ecd1b9-f118-413b-9e59-0c6d2f389c44\") " pod="openstack/glance-db-create-s2nfn" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.036289 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcgd8\" (UniqueName: \"kubernetes.io/projected/36ecd1b9-f118-413b-9e59-0c6d2f389c44-kube-api-access-dcgd8\") pod \"glance-db-create-s2nfn\" (UID: \"36ecd1b9-f118-413b-9e59-0c6d2f389c44\") " pod="openstack/glance-db-create-s2nfn" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.052826 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcgd8\" (UniqueName: \"kubernetes.io/projected/36ecd1b9-f118-413b-9e59-0c6d2f389c44-kube-api-access-dcgd8\") pod \"glance-db-create-s2nfn\" (UID: \"36ecd1b9-f118-413b-9e59-0c6d2f389c44\") " pod="openstack/glance-db-create-s2nfn" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.121277 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-s2nfn" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.162328 4911 generic.go:334] "Generic (PLEG): container finished" podID="b62868b9-909f-4974-a624-796bfb6a2372" containerID="836f143f536c17110752a9ab56b614d830499e6a733eacfabdbdda0480e59c50" exitCode=0 Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.162400 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" event={"ID":"b62868b9-909f-4974-a624-796bfb6a2372","Type":"ContainerDied","Data":"836f143f536c17110752a9ab56b614d830499e6a733eacfabdbdda0480e59c50"} Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.214796 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-86z5n"] Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.216251 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.228051 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.228217 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.228255 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.246129 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:21 crc kubenswrapper[4911]: E0606 09:29:21.246744 4911 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jun 06 09:29:21 crc kubenswrapper[4911]: E0606 09:29:21.246788 4911 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jun 06 09:29:21 crc kubenswrapper[4911]: E0606 09:29:21.246837 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift podName:bbc248fe-b133-4e7e-aad5-c29a3c215e6b nodeName:}" failed. No retries permitted until 2025-06-06 09:29:25.246817153 +0000 UTC m=+976.522242766 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift") pod "swift-storage-0" (UID: "bbc248fe-b133-4e7e-aad5-c29a3c215e6b") : configmap "swift-ring-files" not found Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.274747 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-7lbp2"] Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.276169 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.277883 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-86z5n"] Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.283792 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-7lbp2"] Jun 06 09:29:21 crc kubenswrapper[4911]: E0606 09:29:21.303262 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-qcrwz ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-86z5n" podUID="ffca1b4a-086b-4e77-9afd-c064efe1661e" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.313424 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-86z5n"] Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.348674 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcrwz\" (UniqueName: \"kubernetes.io/projected/ffca1b4a-086b-4e77-9afd-c064efe1661e-kube-api-access-qcrwz\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.348742 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhlwv\" (UniqueName: \"kubernetes.io/projected/6d1f75cb-fd34-4f91-9ade-650845917e96-kube-api-access-jhlwv\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.348785 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-scripts\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.348817 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ffca1b4a-086b-4e77-9afd-c064efe1661e-etc-swift\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.348959 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-ring-data-devices\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.349007 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-scripts\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.349189 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-dispersionconf\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.349230 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-dispersionconf\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.349267 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-combined-ca-bundle\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.349361 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-ring-data-devices\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.349935 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-swiftconf\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.350075 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6d1f75cb-fd34-4f91-9ade-650845917e96-etc-swift\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.350163 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-combined-ca-bundle\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.350240 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-swiftconf\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.451953 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6d1f75cb-fd34-4f91-9ade-650845917e96-etc-swift\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452010 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-combined-ca-bundle\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452039 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-swiftconf\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452060 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcrwz\" (UniqueName: \"kubernetes.io/projected/ffca1b4a-086b-4e77-9afd-c064efe1661e-kube-api-access-qcrwz\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452120 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhlwv\" (UniqueName: \"kubernetes.io/projected/6d1f75cb-fd34-4f91-9ade-650845917e96-kube-api-access-jhlwv\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452154 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-scripts\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452172 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ffca1b4a-086b-4e77-9afd-c064efe1661e-etc-swift\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452204 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-ring-data-devices\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452221 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-scripts\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452255 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-dispersionconf\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452277 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: 
\"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-dispersionconf\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452294 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-combined-ca-bundle\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452336 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-ring-data-devices\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.452364 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-swiftconf\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.453619 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6d1f75cb-fd34-4f91-9ade-650845917e96-etc-swift\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.454667 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-ring-data-devices\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.454719 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-ring-data-devices\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.454771 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-scripts\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.454832 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-scripts\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.455629 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ffca1b4a-086b-4e77-9afd-c064efe1661e-etc-swift\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " 
pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.459201 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-dispersionconf\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.459610 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-combined-ca-bundle\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.459734 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-swiftconf\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.461384 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-combined-ca-bundle\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.461723 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-dispersionconf\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.463242 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-swiftconf\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.473120 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhlwv\" (UniqueName: \"kubernetes.io/projected/6d1f75cb-fd34-4f91-9ade-650845917e96-kube-api-access-jhlwv\") pod \"swift-ring-rebalance-7lbp2\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.474155 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcrwz\" (UniqueName: \"kubernetes.io/projected/ffca1b4a-086b-4e77-9afd-c064efe1661e-kube-api-access-qcrwz\") pod \"swift-ring-rebalance-86z5n\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.618931 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-s2nfn"] Jun 06 09:29:21 crc kubenswrapper[4911]: W0606 09:29:21.624160 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36ecd1b9_f118_413b_9e59_0c6d2f389c44.slice/crio-3371f574f0c1060751c418181f2cf0df0783882f62140d531470255ca3d5e527 WatchSource:0}: 
Error finding container 3371f574f0c1060751c418181f2cf0df0783882f62140d531470255ca3d5e527: Status 404 returned error can't find the container with id 3371f574f0c1060751c418181f2cf0df0783882f62140d531470255ca3d5e527 Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.646040 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:21 crc kubenswrapper[4911]: I0606 09:29:21.958185 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5230e066-1713-410d-a491-28e7f6520e07" path="/var/lib/kubelet/pods/5230e066-1713-410d-a491-28e7f6520e07/volumes" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.124401 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-7lbp2"] Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.171552 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7lbp2" event={"ID":"6d1f75cb-fd34-4f91-9ade-650845917e96","Type":"ContainerStarted","Data":"cb7378b357189067db74ccb35b78ae2c9e5820164afc8bd82f7ada5f599e69b3"} Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.173916 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" event={"ID":"b62868b9-909f-4974-a624-796bfb6a2372","Type":"ContainerStarted","Data":"845fd4412e94515340e61b45021359a7772ed0a4bf081cf4de63a4150156b123"} Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.174957 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.177511 4911 generic.go:334] "Generic (PLEG): container finished" podID="36ecd1b9-f118-413b-9e59-0c6d2f389c44" containerID="a86100116424caef4dcb237c217862df192b1fefa559f0b0768567ad1cf59bda" exitCode=0 Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.177606 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.177696 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-s2nfn" event={"ID":"36ecd1b9-f118-413b-9e59-0c6d2f389c44","Type":"ContainerDied","Data":"a86100116424caef4dcb237c217862df192b1fefa559f0b0768567ad1cf59bda"} Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.177727 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-s2nfn" event={"ID":"36ecd1b9-f118-413b-9e59-0c6d2f389c44","Type":"ContainerStarted","Data":"3371f574f0c1060751c418181f2cf0df0783882f62140d531470255ca3d5e527"} Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.189713 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.200715 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" podStartSLOduration=6.200697665 podStartE2EDuration="6.200697665s" podCreationTimestamp="2025-06-06 09:29:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:29:22.195158983 +0000 UTC m=+973.470584546" watchObservedRunningTime="2025-06-06 09:29:22.200697665 +0000 UTC m=+973.476123198" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.266135 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-combined-ca-bundle\") pod \"ffca1b4a-086b-4e77-9afd-c064efe1661e\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.266205 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ffca1b4a-086b-4e77-9afd-c064efe1661e-etc-swift\") pod \"ffca1b4a-086b-4e77-9afd-c064efe1661e\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.266279 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcrwz\" (UniqueName: \"kubernetes.io/projected/ffca1b4a-086b-4e77-9afd-c064efe1661e-kube-api-access-qcrwz\") pod \"ffca1b4a-086b-4e77-9afd-c064efe1661e\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.266332 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-ring-data-devices\") pod \"ffca1b4a-086b-4e77-9afd-c064efe1661e\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.266352 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-swiftconf\") pod \"ffca1b4a-086b-4e77-9afd-c064efe1661e\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.266517 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-scripts\") pod \"ffca1b4a-086b-4e77-9afd-c064efe1661e\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.266547 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-dispersionconf\") pod \"ffca1b4a-086b-4e77-9afd-c064efe1661e\" (UID: \"ffca1b4a-086b-4e77-9afd-c064efe1661e\") " Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.266662 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffca1b4a-086b-4e77-9afd-c064efe1661e-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "ffca1b4a-086b-4e77-9afd-c064efe1661e" (UID: "ffca1b4a-086b-4e77-9afd-c064efe1661e"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.267015 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-scripts" (OuterVolumeSpecName: "scripts") pod "ffca1b4a-086b-4e77-9afd-c064efe1661e" (UID: "ffca1b4a-086b-4e77-9afd-c064efe1661e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.267009 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "ffca1b4a-086b-4e77-9afd-c064efe1661e" (UID: "ffca1b4a-086b-4e77-9afd-c064efe1661e"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.267041 4911 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/ffca1b4a-086b-4e77-9afd-c064efe1661e-etc-swift\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.274345 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "ffca1b4a-086b-4e77-9afd-c064efe1661e" (UID: "ffca1b4a-086b-4e77-9afd-c064efe1661e"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.274370 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffca1b4a-086b-4e77-9afd-c064efe1661e-kube-api-access-qcrwz" (OuterVolumeSpecName: "kube-api-access-qcrwz") pod "ffca1b4a-086b-4e77-9afd-c064efe1661e" (UID: "ffca1b4a-086b-4e77-9afd-c064efe1661e"). InnerVolumeSpecName "kube-api-access-qcrwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.274546 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ffca1b4a-086b-4e77-9afd-c064efe1661e" (UID: "ffca1b4a-086b-4e77-9afd-c064efe1661e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.275282 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "ffca1b4a-086b-4e77-9afd-c064efe1661e" (UID: "ffca1b4a-086b-4e77-9afd-c064efe1661e"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.368833 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.368876 4911 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-dispersionconf\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.368888 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.368902 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcrwz\" (UniqueName: \"kubernetes.io/projected/ffca1b4a-086b-4e77-9afd-c064efe1661e-kube-api-access-qcrwz\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.368913 4911 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/ffca1b4a-086b-4e77-9afd-c064efe1661e-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:22 crc kubenswrapper[4911]: I0606 09:29:22.368921 4911 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/ffca1b4a-086b-4e77-9afd-c064efe1661e-swiftconf\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:23 crc kubenswrapper[4911]: I0606 09:29:23.184937 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-86z5n" Jun 06 09:29:23 crc kubenswrapper[4911]: I0606 09:29:23.247609 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-86z5n"] Jun 06 09:29:23 crc kubenswrapper[4911]: I0606 09:29:23.253559 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-86z5n"] Jun 06 09:29:23 crc kubenswrapper[4911]: I0606 09:29:23.964739 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffca1b4a-086b-4e77-9afd-c064efe1661e" path="/var/lib/kubelet/pods/ffca1b4a-086b-4e77-9afd-c064efe1661e/volumes" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.060305 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-j6gv7"] Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.062179 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-j6gv7" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.066713 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-j6gv7"] Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.120801 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzxf8\" (UniqueName: \"kubernetes.io/projected/92b2b6d0-40dc-43a8-bcc1-6094912af4d2-kube-api-access-hzxf8\") pod \"keystone-db-create-j6gv7\" (UID: \"92b2b6d0-40dc-43a8-bcc1-6094912af4d2\") " pod="openstack/keystone-db-create-j6gv7" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.201017 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-s2nfn" event={"ID":"36ecd1b9-f118-413b-9e59-0c6d2f389c44","Type":"ContainerDied","Data":"3371f574f0c1060751c418181f2cf0df0783882f62140d531470255ca3d5e527"} Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.201237 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3371f574f0c1060751c418181f2cf0df0783882f62140d531470255ca3d5e527" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.222675 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzxf8\" (UniqueName: \"kubernetes.io/projected/92b2b6d0-40dc-43a8-bcc1-6094912af4d2-kube-api-access-hzxf8\") pod \"keystone-db-create-j6gv7\" (UID: \"92b2b6d0-40dc-43a8-bcc1-6094912af4d2\") " pod="openstack/keystone-db-create-j6gv7" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.239610 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzxf8\" (UniqueName: \"kubernetes.io/projected/92b2b6d0-40dc-43a8-bcc1-6094912af4d2-kube-api-access-hzxf8\") pod \"keystone-db-create-j6gv7\" (UID: \"92b2b6d0-40dc-43a8-bcc1-6094912af4d2\") " pod="openstack/keystone-db-create-j6gv7" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.272758 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-s2nfn" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.324737 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:25 crc kubenswrapper[4911]: E0606 09:29:25.324934 4911 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jun 06 09:29:25 crc kubenswrapper[4911]: E0606 09:29:25.325355 4911 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jun 06 09:29:25 crc kubenswrapper[4911]: E0606 09:29:25.325420 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift podName:bbc248fe-b133-4e7e-aad5-c29a3c215e6b nodeName:}" failed. No retries permitted until 2025-06-06 09:29:33.325400971 +0000 UTC m=+984.600826514 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift") pod "swift-storage-0" (UID: "bbc248fe-b133-4e7e-aad5-c29a3c215e6b") : configmap "swift-ring-files" not found Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.353789 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-vpfq6"] Jun 06 09:29:25 crc kubenswrapper[4911]: E0606 09:29:25.354186 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36ecd1b9-f118-413b-9e59-0c6d2f389c44" containerName="mariadb-database-create" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.354210 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="36ecd1b9-f118-413b-9e59-0c6d2f389c44" containerName="mariadb-database-create" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.354419 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="36ecd1b9-f118-413b-9e59-0c6d2f389c44" containerName="mariadb-database-create" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.355003 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-vpfq6" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.365076 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-vpfq6"] Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.383639 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-j6gv7" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.426979 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcgd8\" (UniqueName: \"kubernetes.io/projected/36ecd1b9-f118-413b-9e59-0c6d2f389c44-kube-api-access-dcgd8\") pod \"36ecd1b9-f118-413b-9e59-0c6d2f389c44\" (UID: \"36ecd1b9-f118-413b-9e59-0c6d2f389c44\") " Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.427560 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r4w8\" (UniqueName: \"kubernetes.io/projected/779f53c2-a052-43d1-8d17-09b3515e1812-kube-api-access-2r4w8\") pod \"placement-db-create-vpfq6\" (UID: \"779f53c2-a052-43d1-8d17-09b3515e1812\") " pod="openstack/placement-db-create-vpfq6" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.431363 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36ecd1b9-f118-413b-9e59-0c6d2f389c44-kube-api-access-dcgd8" (OuterVolumeSpecName: "kube-api-access-dcgd8") pod "36ecd1b9-f118-413b-9e59-0c6d2f389c44" (UID: "36ecd1b9-f118-413b-9e59-0c6d2f389c44"). InnerVolumeSpecName "kube-api-access-dcgd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.530558 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r4w8\" (UniqueName: \"kubernetes.io/projected/779f53c2-a052-43d1-8d17-09b3515e1812-kube-api-access-2r4w8\") pod \"placement-db-create-vpfq6\" (UID: \"779f53c2-a052-43d1-8d17-09b3515e1812\") " pod="openstack/placement-db-create-vpfq6" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.530956 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcgd8\" (UniqueName: \"kubernetes.io/projected/36ecd1b9-f118-413b-9e59-0c6d2f389c44-kube-api-access-dcgd8\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.551162 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r4w8\" (UniqueName: \"kubernetes.io/projected/779f53c2-a052-43d1-8d17-09b3515e1812-kube-api-access-2r4w8\") pod \"placement-db-create-vpfq6\" (UID: \"779f53c2-a052-43d1-8d17-09b3515e1812\") " pod="openstack/placement-db-create-vpfq6" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.681572 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-vpfq6" Jun 06 09:29:25 crc kubenswrapper[4911]: I0606 09:29:25.819275 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-j6gv7"] Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.118604 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-vpfq6"] Jun 06 09:29:26 crc kubenswrapper[4911]: W0606 09:29:26.124160 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod779f53c2_a052_43d1_8d17_09b3515e1812.slice/crio-1af9f2aa88c1bfdafc8ced230c88289a00c676c9cba8a12833e7c06c9275b43f WatchSource:0}: Error finding container 1af9f2aa88c1bfdafc8ced230c88289a00c676c9cba8a12833e7c06c9275b43f: Status 404 returned error can't find the container with id 1af9f2aa88c1bfdafc8ced230c88289a00c676c9cba8a12833e7c06c9275b43f Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.210225 4911 generic.go:334] "Generic (PLEG): container finished" podID="92b2b6d0-40dc-43a8-bcc1-6094912af4d2" containerID="376ec9256b79fdda7406a82326a552ca9fbf73d28a69d28337ad4daddd28bbc6" exitCode=0 Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.210294 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-j6gv7" event={"ID":"92b2b6d0-40dc-43a8-bcc1-6094912af4d2","Type":"ContainerDied","Data":"376ec9256b79fdda7406a82326a552ca9fbf73d28a69d28337ad4daddd28bbc6"} Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.210501 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-j6gv7" event={"ID":"92b2b6d0-40dc-43a8-bcc1-6094912af4d2","Type":"ContainerStarted","Data":"c03b1a0728caf9ac466d680f08792bce8ba272832c158f858cef401c3ff1d828"} Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.211760 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-vpfq6" event={"ID":"779f53c2-a052-43d1-8d17-09b3515e1812","Type":"ContainerStarted","Data":"1af9f2aa88c1bfdafc8ced230c88289a00c676c9cba8a12833e7c06c9275b43f"} Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.213585 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-s2nfn" Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.213578 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7lbp2" event={"ID":"6d1f75cb-fd34-4f91-9ade-650845917e96","Type":"ContainerStarted","Data":"186271c08883afa401cdc66c33a894fce7dbbd5af99882e43bc481466923e0d4"} Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.254769 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-7lbp2" podStartSLOduration=2.241876199 podStartE2EDuration="5.254736273s" podCreationTimestamp="2025-06-06 09:29:21 +0000 UTC" firstStartedPulling="2025-06-06 09:29:22.131150415 +0000 UTC m=+973.406575958" lastFinishedPulling="2025-06-06 09:29:25.144010489 +0000 UTC m=+976.419436032" observedRunningTime="2025-06-06 09:29:26.242396958 +0000 UTC m=+977.517822511" watchObservedRunningTime="2025-06-06 09:29:26.254736273 +0000 UTC m=+977.530161816" Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.487240 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.553057 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-697f967d49-t9lmx"] Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.553397 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" podUID="fd3241c2-4261-4525-b5dd-9a9711232726" containerName="dnsmasq-dns" containerID="cri-o://9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d" gracePeriod=10 Jun 06 09:29:26 crc kubenswrapper[4911]: I0606 09:29:26.962337 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.055604 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-ovsdbserver-nb\") pod \"fd3241c2-4261-4525-b5dd-9a9711232726\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.055688 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvtk5\" (UniqueName: \"kubernetes.io/projected/fd3241c2-4261-4525-b5dd-9a9711232726-kube-api-access-mvtk5\") pod \"fd3241c2-4261-4525-b5dd-9a9711232726\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.055814 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-dns-svc\") pod \"fd3241c2-4261-4525-b5dd-9a9711232726\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.055908 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-config\") pod \"fd3241c2-4261-4525-b5dd-9a9711232726\" (UID: \"fd3241c2-4261-4525-b5dd-9a9711232726\") " Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.061386 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd3241c2-4261-4525-b5dd-9a9711232726-kube-api-access-mvtk5" (OuterVolumeSpecName: "kube-api-access-mvtk5") pod "fd3241c2-4261-4525-b5dd-9a9711232726" (UID: "fd3241c2-4261-4525-b5dd-9a9711232726"). InnerVolumeSpecName "kube-api-access-mvtk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.094375 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-config" (OuterVolumeSpecName: "config") pod "fd3241c2-4261-4525-b5dd-9a9711232726" (UID: "fd3241c2-4261-4525-b5dd-9a9711232726"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.107267 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fd3241c2-4261-4525-b5dd-9a9711232726" (UID: "fd3241c2-4261-4525-b5dd-9a9711232726"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.112568 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fd3241c2-4261-4525-b5dd-9a9711232726" (UID: "fd3241c2-4261-4525-b5dd-9a9711232726"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.157636 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.157673 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvtk5\" (UniqueName: \"kubernetes.io/projected/fd3241c2-4261-4525-b5dd-9a9711232726-kube-api-access-mvtk5\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.157688 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.157697 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd3241c2-4261-4525-b5dd-9a9711232726-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.224275 4911 generic.go:334] "Generic (PLEG): container finished" podID="fd3241c2-4261-4525-b5dd-9a9711232726" containerID="9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d" exitCode=0 Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.224384 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" event={"ID":"fd3241c2-4261-4525-b5dd-9a9711232726","Type":"ContainerDied","Data":"9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d"} Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.224456 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" event={"ID":"fd3241c2-4261-4525-b5dd-9a9711232726","Type":"ContainerDied","Data":"f80a1ea74f5a6f3f23da7ba43c6eece425ae304c123d78e834469ddcbc13a651"} Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.224486 4911 scope.go:117] "RemoveContainer" containerID="9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.224680 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-697f967d49-t9lmx" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.225729 4911 generic.go:334] "Generic (PLEG): container finished" podID="779f53c2-a052-43d1-8d17-09b3515e1812" containerID="dd9b3829522c91f7fe7428979e2e9e6c098817da4247700eb6574d202262d518" exitCode=0 Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.225831 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-vpfq6" event={"ID":"779f53c2-a052-43d1-8d17-09b3515e1812","Type":"ContainerDied","Data":"dd9b3829522c91f7fe7428979e2e9e6c098817da4247700eb6574d202262d518"} Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.242128 4911 scope.go:117] "RemoveContainer" containerID="9241dcbbac44552e5eba4288bed6765d0bd4b30f691bbef23256158a3751c02f" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.267255 4911 scope.go:117] "RemoveContainer" containerID="9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d" Jun 06 09:29:27 crc kubenswrapper[4911]: E0606 09:29:27.270685 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d\": container with ID starting with 9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d not found: ID does not exist" containerID="9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.270738 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d"} err="failed to get container status \"9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d\": rpc error: code = NotFound desc = could not find container \"9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d\": container with ID starting with 9cf64c1abebdbe898e09e2aff13a67a2a8e391d1caa333d97e5a383eb0d7c72d not found: ID does not exist" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.270771 4911 scope.go:117] "RemoveContainer" containerID="9241dcbbac44552e5eba4288bed6765d0bd4b30f691bbef23256158a3751c02f" Jun 06 09:29:27 crc kubenswrapper[4911]: E0606 09:29:27.271243 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9241dcbbac44552e5eba4288bed6765d0bd4b30f691bbef23256158a3751c02f\": container with ID starting with 9241dcbbac44552e5eba4288bed6765d0bd4b30f691bbef23256158a3751c02f not found: ID does not exist" containerID="9241dcbbac44552e5eba4288bed6765d0bd4b30f691bbef23256158a3751c02f" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.271391 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9241dcbbac44552e5eba4288bed6765d0bd4b30f691bbef23256158a3751c02f"} err="failed to get container status \"9241dcbbac44552e5eba4288bed6765d0bd4b30f691bbef23256158a3751c02f\": rpc error: code = NotFound desc = could not find container \"9241dcbbac44552e5eba4288bed6765d0bd4b30f691bbef23256158a3751c02f\": container with ID starting with 9241dcbbac44552e5eba4288bed6765d0bd4b30f691bbef23256158a3751c02f not found: ID does not exist" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.276064 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-697f967d49-t9lmx"] Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.283182 4911 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/dnsmasq-dns-697f967d49-t9lmx"] Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.654542 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-j6gv7" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.765725 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzxf8\" (UniqueName: \"kubernetes.io/projected/92b2b6d0-40dc-43a8-bcc1-6094912af4d2-kube-api-access-hzxf8\") pod \"92b2b6d0-40dc-43a8-bcc1-6094912af4d2\" (UID: \"92b2b6d0-40dc-43a8-bcc1-6094912af4d2\") " Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.771974 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92b2b6d0-40dc-43a8-bcc1-6094912af4d2-kube-api-access-hzxf8" (OuterVolumeSpecName: "kube-api-access-hzxf8") pod "92b2b6d0-40dc-43a8-bcc1-6094912af4d2" (UID: "92b2b6d0-40dc-43a8-bcc1-6094912af4d2"). InnerVolumeSpecName "kube-api-access-hzxf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.867931 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzxf8\" (UniqueName: \"kubernetes.io/projected/92b2b6d0-40dc-43a8-bcc1-6094912af4d2-kube-api-access-hzxf8\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:27 crc kubenswrapper[4911]: I0606 09:29:27.959308 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd3241c2-4261-4525-b5dd-9a9711232726" path="/var/lib/kubelet/pods/fd3241c2-4261-4525-b5dd-9a9711232726/volumes" Jun 06 09:29:28 crc kubenswrapper[4911]: I0606 09:29:28.253584 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-j6gv7" Jun 06 09:29:28 crc kubenswrapper[4911]: I0606 09:29:28.253583 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-j6gv7" event={"ID":"92b2b6d0-40dc-43a8-bcc1-6094912af4d2","Type":"ContainerDied","Data":"c03b1a0728caf9ac466d680f08792bce8ba272832c158f858cef401c3ff1d828"} Jun 06 09:29:28 crc kubenswrapper[4911]: I0606 09:29:28.253647 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c03b1a0728caf9ac466d680f08792bce8ba272832c158f858cef401c3ff1d828" Jun 06 09:29:28 crc kubenswrapper[4911]: I0606 09:29:28.581935 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-vpfq6" Jun 06 09:29:28 crc kubenswrapper[4911]: I0606 09:29:28.685313 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2r4w8\" (UniqueName: \"kubernetes.io/projected/779f53c2-a052-43d1-8d17-09b3515e1812-kube-api-access-2r4w8\") pod \"779f53c2-a052-43d1-8d17-09b3515e1812\" (UID: \"779f53c2-a052-43d1-8d17-09b3515e1812\") " Jun 06 09:29:28 crc kubenswrapper[4911]: I0606 09:29:28.689824 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/779f53c2-a052-43d1-8d17-09b3515e1812-kube-api-access-2r4w8" (OuterVolumeSpecName: "kube-api-access-2r4w8") pod "779f53c2-a052-43d1-8d17-09b3515e1812" (UID: "779f53c2-a052-43d1-8d17-09b3515e1812"). InnerVolumeSpecName "kube-api-access-2r4w8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:28 crc kubenswrapper[4911]: I0606 09:29:28.788163 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2r4w8\" (UniqueName: \"kubernetes.io/projected/779f53c2-a052-43d1-8d17-09b3515e1812-kube-api-access-2r4w8\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:29 crc kubenswrapper[4911]: I0606 09:29:29.276942 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-vpfq6" event={"ID":"779f53c2-a052-43d1-8d17-09b3515e1812","Type":"ContainerDied","Data":"1af9f2aa88c1bfdafc8ced230c88289a00c676c9cba8a12833e7c06c9275b43f"} Jun 06 09:29:29 crc kubenswrapper[4911]: I0606 09:29:29.276986 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1af9f2aa88c1bfdafc8ced230c88289a00c676c9cba8a12833e7c06c9275b43f" Jun 06 09:29:29 crc kubenswrapper[4911]: I0606 09:29:29.277056 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-vpfq6" Jun 06 09:29:32 crc kubenswrapper[4911]: I0606 09:29:32.300860 4911 generic.go:334] "Generic (PLEG): container finished" podID="6d1f75cb-fd34-4f91-9ade-650845917e96" containerID="186271c08883afa401cdc66c33a894fce7dbbd5af99882e43bc481466923e0d4" exitCode=0 Jun 06 09:29:32 crc kubenswrapper[4911]: I0606 09:29:32.300952 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7lbp2" event={"ID":"6d1f75cb-fd34-4f91-9ade-650845917e96","Type":"ContainerDied","Data":"186271c08883afa401cdc66c33a894fce7dbbd5af99882e43bc481466923e0d4"} Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.332375 4911 generic.go:334] "Generic (PLEG): container finished" podID="abf307d7-9aa9-4d2f-9943-e3a085568096" containerID="6f0518a6b3cc2f944aa4e2c7b254c3bf8b64829224f9a2eb7ac9e7286d602d50" exitCode=0 Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.332423 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abf307d7-9aa9-4d2f-9943-e3a085568096","Type":"ContainerDied","Data":"6f0518a6b3cc2f944aa4e2c7b254c3bf8b64829224f9a2eb7ac9e7286d602d50"} Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.337738 4911 generic.go:334] "Generic (PLEG): container finished" podID="6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" containerID="cb54fd730abcec5f423a30e6720800012bb616571840ae9eba50348833ebe2a7" exitCode=0 Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.337885 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4","Type":"ContainerDied","Data":"cb54fd730abcec5f423a30e6720800012bb616571840ae9eba50348833ebe2a7"} Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.364724 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.398429 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bbc248fe-b133-4e7e-aad5-c29a3c215e6b-etc-swift\") pod \"swift-storage-0\" (UID: \"bbc248fe-b133-4e7e-aad5-c29a3c215e6b\") " pod="openstack/swift-storage-0" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.611373 4911 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.654726 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.769640 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-combined-ca-bundle\") pod \"6d1f75cb-fd34-4f91-9ade-650845917e96\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.770199 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-scripts\") pod \"6d1f75cb-fd34-4f91-9ade-650845917e96\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.770238 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-dispersionconf\") pod \"6d1f75cb-fd34-4f91-9ade-650845917e96\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.770319 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-ring-data-devices\") pod \"6d1f75cb-fd34-4f91-9ade-650845917e96\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.770416 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-swiftconf\") pod \"6d1f75cb-fd34-4f91-9ade-650845917e96\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.770460 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6d1f75cb-fd34-4f91-9ade-650845917e96-etc-swift\") pod \"6d1f75cb-fd34-4f91-9ade-650845917e96\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.770483 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhlwv\" (UniqueName: \"kubernetes.io/projected/6d1f75cb-fd34-4f91-9ade-650845917e96-kube-api-access-jhlwv\") pod \"6d1f75cb-fd34-4f91-9ade-650845917e96\" (UID: \"6d1f75cb-fd34-4f91-9ade-650845917e96\") " Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.771490 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "6d1f75cb-fd34-4f91-9ade-650845917e96" (UID: "6d1f75cb-fd34-4f91-9ade-650845917e96"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.783728 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d1f75cb-fd34-4f91-9ade-650845917e96-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "6d1f75cb-fd34-4f91-9ade-650845917e96" (UID: "6d1f75cb-fd34-4f91-9ade-650845917e96"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.792270 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d1f75cb-fd34-4f91-9ade-650845917e96-kube-api-access-jhlwv" (OuterVolumeSpecName: "kube-api-access-jhlwv") pod "6d1f75cb-fd34-4f91-9ade-650845917e96" (UID: "6d1f75cb-fd34-4f91-9ade-650845917e96"). InnerVolumeSpecName "kube-api-access-jhlwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.793228 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "6d1f75cb-fd34-4f91-9ade-650845917e96" (UID: "6d1f75cb-fd34-4f91-9ade-650845917e96"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.801609 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d1f75cb-fd34-4f91-9ade-650845917e96" (UID: "6d1f75cb-fd34-4f91-9ade-650845917e96"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.807502 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "6d1f75cb-fd34-4f91-9ade-650845917e96" (UID: "6d1f75cb-fd34-4f91-9ade-650845917e96"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.817624 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-scripts" (OuterVolumeSpecName: "scripts") pod "6d1f75cb-fd34-4f91-9ade-650845917e96" (UID: "6d1f75cb-fd34-4f91-9ade-650845917e96"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.879105 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.879157 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.879169 4911 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-dispersionconf\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.879178 4911 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6d1f75cb-fd34-4f91-9ade-650845917e96-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.879189 4911 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6d1f75cb-fd34-4f91-9ade-650845917e96-swiftconf\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.879198 4911 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6d1f75cb-fd34-4f91-9ade-650845917e96-etc-swift\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:33 crc kubenswrapper[4911]: I0606 09:29:33.879207 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhlwv\" (UniqueName: \"kubernetes.io/projected/6d1f75cb-fd34-4f91-9ade-650845917e96-kube-api-access-jhlwv\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.246210 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.347028 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"9cab30fd328bc68d7dee76334f0ee48fec7201848d323a43fa68c86ef7e8bdb8"} Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.362968 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abf307d7-9aa9-4d2f-9943-e3a085568096","Type":"ContainerStarted","Data":"db8390c7890d5f767239b22d5379e46e15bf8a2ac339e7d9caddb2d672658514"} Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.363594 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.365593 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4","Type":"ContainerStarted","Data":"c1b603770fd0771eb755ba8401e1fc4a29c04489ed1107eae7ebc65146557e23"} Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.366278 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.368082 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-7lbp2" 
event={"ID":"6d1f75cb-fd34-4f91-9ade-650845917e96","Type":"ContainerDied","Data":"cb7378b357189067db74ccb35b78ae2c9e5820164afc8bd82f7ada5f599e69b3"} Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.368131 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb7378b357189067db74ccb35b78ae2c9e5820164afc8bd82f7ada5f599e69b3" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.368211 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-7lbp2" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.392120 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=45.935612526 podStartE2EDuration="54.392080262s" podCreationTimestamp="2025-06-06 09:28:40 +0000 UTC" firstStartedPulling="2025-06-06 09:28:52.025635455 +0000 UTC m=+943.301060998" lastFinishedPulling="2025-06-06 09:29:00.48208723 +0000 UTC m=+951.757528734" observedRunningTime="2025-06-06 09:29:34.38731984 +0000 UTC m=+985.662745403" watchObservedRunningTime="2025-06-06 09:29:34.392080262 +0000 UTC m=+985.667505805" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.563286 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=46.904891124 podStartE2EDuration="54.563262775s" podCreationTimestamp="2025-06-06 09:28:40 +0000 UTC" firstStartedPulling="2025-06-06 09:28:52.822151399 +0000 UTC m=+944.097576942" lastFinishedPulling="2025-06-06 09:29:00.48052305 +0000 UTC m=+951.755948593" observedRunningTime="2025-06-06 09:29:34.553080465 +0000 UTC m=+985.828506008" watchObservedRunningTime="2025-06-06 09:29:34.563262775 +0000 UTC m=+985.838688318" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.665717 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4dlv9" podUID="5a8407a6-611d-477c-8530-9c1728797994" containerName="ovn-controller" probeResult="failure" output=< Jun 06 09:29:34 crc kubenswrapper[4911]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jun 06 09:29:34 crc kubenswrapper[4911]: > Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.698297 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.704209 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-jjkvl" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.912673 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4dlv9-config-9z7d8"] Jun 06 09:29:34 crc kubenswrapper[4911]: E0606 09:29:34.913024 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="779f53c2-a052-43d1-8d17-09b3515e1812" containerName="mariadb-database-create" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.913043 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="779f53c2-a052-43d1-8d17-09b3515e1812" containerName="mariadb-database-create" Jun 06 09:29:34 crc kubenswrapper[4911]: E0606 09:29:34.913056 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92b2b6d0-40dc-43a8-bcc1-6094912af4d2" containerName="mariadb-database-create" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.913069 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="92b2b6d0-40dc-43a8-bcc1-6094912af4d2" 
containerName="mariadb-database-create" Jun 06 09:29:34 crc kubenswrapper[4911]: E0606 09:29:34.913080 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd3241c2-4261-4525-b5dd-9a9711232726" containerName="dnsmasq-dns" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.913129 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd3241c2-4261-4525-b5dd-9a9711232726" containerName="dnsmasq-dns" Jun 06 09:29:34 crc kubenswrapper[4911]: E0606 09:29:34.913150 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd3241c2-4261-4525-b5dd-9a9711232726" containerName="init" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.913156 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd3241c2-4261-4525-b5dd-9a9711232726" containerName="init" Jun 06 09:29:34 crc kubenswrapper[4911]: E0606 09:29:34.913166 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d1f75cb-fd34-4f91-9ade-650845917e96" containerName="swift-ring-rebalance" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.913172 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d1f75cb-fd34-4f91-9ade-650845917e96" containerName="swift-ring-rebalance" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.913388 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="92b2b6d0-40dc-43a8-bcc1-6094912af4d2" containerName="mariadb-database-create" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.913400 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd3241c2-4261-4525-b5dd-9a9711232726" containerName="dnsmasq-dns" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.913409 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="779f53c2-a052-43d1-8d17-09b3515e1812" containerName="mariadb-database-create" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.913420 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d1f75cb-fd34-4f91-9ade-650845917e96" containerName="swift-ring-rebalance" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.913963 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:34 crc kubenswrapper[4911]: I0606 09:29:34.937034 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4dlv9-config-9z7d8"] Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.057925 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run-ovn\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.058074 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-log-ovn\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.058140 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mkj2\" (UniqueName: \"kubernetes.io/projected/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-kube-api-access-4mkj2\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.058702 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.058767 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-scripts\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.160799 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-log-ovn\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.160859 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mkj2\" (UniqueName: \"kubernetes.io/projected/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-kube-api-access-4mkj2\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.160887 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 
09:29:35.160908 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-scripts\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.160956 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run-ovn\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.161217 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-log-ovn\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.161243 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.161262 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run-ovn\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.163470 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-scripts\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.174536 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6042-account-create-ttp6q"] Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.175699 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6042-account-create-ttp6q" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.179293 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.182472 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mkj2\" (UniqueName: \"kubernetes.io/projected/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-kube-api-access-4mkj2\") pod \"ovn-controller-4dlv9-config-9z7d8\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.186391 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6042-account-create-ttp6q"] Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.240115 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.364007 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9fw7\" (UniqueName: \"kubernetes.io/projected/16544562-77d9-4360-856e-5e70c369b7bb-kube-api-access-n9fw7\") pod \"keystone-6042-account-create-ttp6q\" (UID: \"16544562-77d9-4360-856e-5e70c369b7bb\") " pod="openstack/keystone-6042-account-create-ttp6q" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.465662 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9fw7\" (UniqueName: \"kubernetes.io/projected/16544562-77d9-4360-856e-5e70c369b7bb-kube-api-access-n9fw7\") pod \"keystone-6042-account-create-ttp6q\" (UID: \"16544562-77d9-4360-856e-5e70c369b7bb\") " pod="openstack/keystone-6042-account-create-ttp6q" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.496180 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9fw7\" (UniqueName: \"kubernetes.io/projected/16544562-77d9-4360-856e-5e70c369b7bb-kube-api-access-n9fw7\") pod \"keystone-6042-account-create-ttp6q\" (UID: \"16544562-77d9-4360-856e-5e70c369b7bb\") " pod="openstack/keystone-6042-account-create-ttp6q" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.501029 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-b189-account-create-hwbdc"] Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.502048 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b189-account-create-hwbdc" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.504839 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.509252 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b189-account-create-hwbdc"] Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.533392 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6042-account-create-ttp6q" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.669508 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgql7\" (UniqueName: \"kubernetes.io/projected/753820ef-863f-428c-bfc7-5dc548a56eb1-kube-api-access-sgql7\") pod \"placement-b189-account-create-hwbdc\" (UID: \"753820ef-863f-428c-bfc7-5dc548a56eb1\") " pod="openstack/placement-b189-account-create-hwbdc" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.690810 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4dlv9-config-9z7d8"] Jun 06 09:29:35 crc kubenswrapper[4911]: W0606 09:29:35.697335 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3ad99d0_bb87_4526_9fb8_7ba427ae893d.slice/crio-97495aff5573468e20be7fe9f279efb0cfc508ed6bbdf36f480b2a1145141126 WatchSource:0}: Error finding container 97495aff5573468e20be7fe9f279efb0cfc508ed6bbdf36f480b2a1145141126: Status 404 returned error can't find the container with id 97495aff5573468e20be7fe9f279efb0cfc508ed6bbdf36f480b2a1145141126 Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.776361 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgql7\" (UniqueName: \"kubernetes.io/projected/753820ef-863f-428c-bfc7-5dc548a56eb1-kube-api-access-sgql7\") pod \"placement-b189-account-create-hwbdc\" (UID: \"753820ef-863f-428c-bfc7-5dc548a56eb1\") " pod="openstack/placement-b189-account-create-hwbdc" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.816388 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgql7\" (UniqueName: \"kubernetes.io/projected/753820ef-863f-428c-bfc7-5dc548a56eb1-kube-api-access-sgql7\") pod \"placement-b189-account-create-hwbdc\" (UID: \"753820ef-863f-428c-bfc7-5dc548a56eb1\") " pod="openstack/placement-b189-account-create-hwbdc" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.854981 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-b189-account-create-hwbdc" Jun 06 09:29:35 crc kubenswrapper[4911]: I0606 09:29:35.981378 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6042-account-create-ttp6q"] Jun 06 09:29:36 crc kubenswrapper[4911]: W0606 09:29:36.001302 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16544562_77d9_4360_856e_5e70c369b7bb.slice/crio-3862bc45065270f8ee321f746634eb02705c02dd7f670f37571d4cd7ce5b3f58 WatchSource:0}: Error finding container 3862bc45065270f8ee321f746634eb02705c02dd7f670f37571d4cd7ce5b3f58: Status 404 returned error can't find the container with id 3862bc45065270f8ee321f746634eb02705c02dd7f670f37571d4cd7ce5b3f58 Jun 06 09:29:36 crc kubenswrapper[4911]: I0606 09:29:36.354153 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b189-account-create-hwbdc"] Jun 06 09:29:36 crc kubenswrapper[4911]: I0606 09:29:36.385158 4911 generic.go:334] "Generic (PLEG): container finished" podID="16544562-77d9-4360-856e-5e70c369b7bb" containerID="b97422f12350163894fa2e3957699c6848e7a4d72d5c02f1ae7bb75f07e389fa" exitCode=0 Jun 06 09:29:36 crc kubenswrapper[4911]: I0606 09:29:36.385230 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6042-account-create-ttp6q" event={"ID":"16544562-77d9-4360-856e-5e70c369b7bb","Type":"ContainerDied","Data":"b97422f12350163894fa2e3957699c6848e7a4d72d5c02f1ae7bb75f07e389fa"} Jun 06 09:29:36 crc kubenswrapper[4911]: I0606 09:29:36.385257 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6042-account-create-ttp6q" event={"ID":"16544562-77d9-4360-856e-5e70c369b7bb","Type":"ContainerStarted","Data":"3862bc45065270f8ee321f746634eb02705c02dd7f670f37571d4cd7ce5b3f58"} Jun 06 09:29:36 crc kubenswrapper[4911]: I0606 09:29:36.388651 4911 generic.go:334] "Generic (PLEG): container finished" podID="e3ad99d0-bb87-4526-9fb8-7ba427ae893d" containerID="04a9d8737dcf583de2631d4db21c886d7392e5b49046024ab08d7de012088b5c" exitCode=0 Jun 06 09:29:36 crc kubenswrapper[4911]: I0606 09:29:36.388688 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4dlv9-config-9z7d8" event={"ID":"e3ad99d0-bb87-4526-9fb8-7ba427ae893d","Type":"ContainerDied","Data":"04a9d8737dcf583de2631d4db21c886d7392e5b49046024ab08d7de012088b5c"} Jun 06 09:29:36 crc kubenswrapper[4911]: I0606 09:29:36.388709 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4dlv9-config-9z7d8" event={"ID":"e3ad99d0-bb87-4526-9fb8-7ba427ae893d","Type":"ContainerStarted","Data":"97495aff5573468e20be7fe9f279efb0cfc508ed6bbdf36f480b2a1145141126"} Jun 06 09:29:36 crc kubenswrapper[4911]: W0606 09:29:36.464593 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod753820ef_863f_428c_bfc7_5dc548a56eb1.slice/crio-eb7c8a5adeaa9f6063f99fdab1a7816bf149e9b5acd7e7dd89c0519662c35931 WatchSource:0}: Error finding container eb7c8a5adeaa9f6063f99fdab1a7816bf149e9b5acd7e7dd89c0519662c35931: Status 404 returned error can't find the container with id eb7c8a5adeaa9f6063f99fdab1a7816bf149e9b5acd7e7dd89c0519662c35931 Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.404283 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"99a91fbab875f954818337c3d8384210b3c9befb7f6bf845702eff060b0803ad"} Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.404933 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"0605831215ee0fc7013e80067c5be8744b110bbd098b784c2ab766964cc6c905"} Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.404944 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"8bf0a697b5d8096b2318cfa149de852119a42193a383e68cb127eb1acfafa6dd"} Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.404954 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"984aec2526b0628caaa0aa75146287ebf9fde360b39479f56c81a94fbc5b96c5"} Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.407424 4911 generic.go:334] "Generic (PLEG): container finished" podID="753820ef-863f-428c-bfc7-5dc548a56eb1" containerID="c899ee871e6fae7e8d4689a0585665d2682d26b4415b4fd262d06a4d5bdf2101" exitCode=0 Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.407930 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b189-account-create-hwbdc" event={"ID":"753820ef-863f-428c-bfc7-5dc548a56eb1","Type":"ContainerDied","Data":"c899ee871e6fae7e8d4689a0585665d2682d26b4415b4fd262d06a4d5bdf2101"} Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.407957 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b189-account-create-hwbdc" event={"ID":"753820ef-863f-428c-bfc7-5dc548a56eb1","Type":"ContainerStarted","Data":"eb7c8a5adeaa9f6063f99fdab1a7816bf149e9b5acd7e7dd89c0519662c35931"} Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.771030 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.870687 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6042-account-create-ttp6q" Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.917514 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mkj2\" (UniqueName: \"kubernetes.io/projected/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-kube-api-access-4mkj2\") pod \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.917684 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-log-ovn\") pod \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.917782 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "e3ad99d0-bb87-4526-9fb8-7ba427ae893d" (UID: "e3ad99d0-bb87-4526-9fb8-7ba427ae893d"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.917863 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run-ovn\") pod \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.917931 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "e3ad99d0-bb87-4526-9fb8-7ba427ae893d" (UID: "e3ad99d0-bb87-4526-9fb8-7ba427ae893d"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.918005 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-scripts\") pod \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.919241 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-scripts" (OuterVolumeSpecName: "scripts") pod "e3ad99d0-bb87-4526-9fb8-7ba427ae893d" (UID: "e3ad99d0-bb87-4526-9fb8-7ba427ae893d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.921056 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run\") pod \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\" (UID: \"e3ad99d0-bb87-4526-9fb8-7ba427ae893d\") " Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.921540 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run" (OuterVolumeSpecName: "var-run") pod "e3ad99d0-bb87-4526-9fb8-7ba427ae893d" (UID: "e3ad99d0-bb87-4526-9fb8-7ba427ae893d"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.922271 4911 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.922298 4911 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.922312 4911 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.922324 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:37 crc kubenswrapper[4911]: I0606 09:29:37.923539 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-kube-api-access-4mkj2" (OuterVolumeSpecName: "kube-api-access-4mkj2") pod "e3ad99d0-bb87-4526-9fb8-7ba427ae893d" (UID: "e3ad99d0-bb87-4526-9fb8-7ba427ae893d"). InnerVolumeSpecName "kube-api-access-4mkj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.023467 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9fw7\" (UniqueName: \"kubernetes.io/projected/16544562-77d9-4360-856e-5e70c369b7bb-kube-api-access-n9fw7\") pod \"16544562-77d9-4360-856e-5e70c369b7bb\" (UID: \"16544562-77d9-4360-856e-5e70c369b7bb\") " Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.023846 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mkj2\" (UniqueName: \"kubernetes.io/projected/e3ad99d0-bb87-4526-9fb8-7ba427ae893d-kube-api-access-4mkj2\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.027255 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16544562-77d9-4360-856e-5e70c369b7bb-kube-api-access-n9fw7" (OuterVolumeSpecName: "kube-api-access-n9fw7") pod "16544562-77d9-4360-856e-5e70c369b7bb" (UID: "16544562-77d9-4360-856e-5e70c369b7bb"). InnerVolumeSpecName "kube-api-access-n9fw7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.125529 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9fw7\" (UniqueName: \"kubernetes.io/projected/16544562-77d9-4360-856e-5e70c369b7bb-kube-api-access-n9fw7\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.417375 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6042-account-create-ttp6q" event={"ID":"16544562-77d9-4360-856e-5e70c369b7bb","Type":"ContainerDied","Data":"3862bc45065270f8ee321f746634eb02705c02dd7f670f37571d4cd7ce5b3f58"} Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.417419 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3862bc45065270f8ee321f746634eb02705c02dd7f670f37571d4cd7ce5b3f58" Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.417476 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6042-account-create-ttp6q" Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.420340 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4dlv9-config-9z7d8" event={"ID":"e3ad99d0-bb87-4526-9fb8-7ba427ae893d","Type":"ContainerDied","Data":"97495aff5573468e20be7fe9f279efb0cfc508ed6bbdf36f480b2a1145141126"} Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.420425 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97495aff5573468e20be7fe9f279efb0cfc508ed6bbdf36f480b2a1145141126" Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.420537 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4dlv9-config-9z7d8" Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.744221 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b189-account-create-hwbdc" Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.838463 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgql7\" (UniqueName: \"kubernetes.io/projected/753820ef-863f-428c-bfc7-5dc548a56eb1-kube-api-access-sgql7\") pod \"753820ef-863f-428c-bfc7-5dc548a56eb1\" (UID: \"753820ef-863f-428c-bfc7-5dc548a56eb1\") " Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.844574 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/753820ef-863f-428c-bfc7-5dc548a56eb1-kube-api-access-sgql7" (OuterVolumeSpecName: "kube-api-access-sgql7") pod "753820ef-863f-428c-bfc7-5dc548a56eb1" (UID: "753820ef-863f-428c-bfc7-5dc548a56eb1"). InnerVolumeSpecName "kube-api-access-sgql7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.877164 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4dlv9-config-9z7d8"] Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.885925 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-4dlv9-config-9z7d8"] Jun 06 09:29:38 crc kubenswrapper[4911]: I0606 09:29:38.944319 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgql7\" (UniqueName: \"kubernetes.io/projected/753820ef-863f-428c-bfc7-5dc548a56eb1-kube-api-access-sgql7\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:39 crc kubenswrapper[4911]: I0606 09:29:39.430528 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"b8796eb835b7caed422549a9e12145f7aeedab4ad523033d4071d9b57cf10b6d"} Jun 06 09:29:39 crc kubenswrapper[4911]: I0606 09:29:39.430880 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"852bada059b762b8c548e17bb94f6393f9b8e4de5304d174e1947999358db6d0"} Jun 06 09:29:39 crc kubenswrapper[4911]: I0606 09:29:39.430891 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"bf6cbd18a5294a55f42e286e038d1d8115c2b4b86b6525b3450c076be5a9b5e2"} Jun 06 09:29:39 crc kubenswrapper[4911]: I0606 09:29:39.430901 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"bb3e49a1dffefbaf920531b5dff7bb04f0730b3daadeb3f56bb0bd2534608bf8"} Jun 06 09:29:39 crc kubenswrapper[4911]: I0606 09:29:39.432151 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b189-account-create-hwbdc" event={"ID":"753820ef-863f-428c-bfc7-5dc548a56eb1","Type":"ContainerDied","Data":"eb7c8a5adeaa9f6063f99fdab1a7816bf149e9b5acd7e7dd89c0519662c35931"} Jun 06 09:29:39 crc kubenswrapper[4911]: I0606 09:29:39.432180 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb7c8a5adeaa9f6063f99fdab1a7816bf149e9b5acd7e7dd89c0519662c35931" Jun 06 09:29:39 crc kubenswrapper[4911]: I0606 09:29:39.432206 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-b189-account-create-hwbdc" Jun 06 09:29:39 crc kubenswrapper[4911]: I0606 09:29:39.707350 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-4dlv9" Jun 06 09:29:39 crc kubenswrapper[4911]: I0606 09:29:39.960787 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3ad99d0-bb87-4526-9fb8-7ba427ae893d" path="/var/lib/kubelet/pods/e3ad99d0-bb87-4526-9fb8-7ba427ae893d/volumes" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.036472 4911 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podf904c35e-e8d4-40be-97b0-3c897429628b"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podf904c35e-e8d4-40be-97b0-3c897429628b] : Timed out while waiting for systemd to remove kubepods-besteffort-podf904c35e_e8d4_40be_97b0_3c897429628b.slice" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.812688 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-8066-account-create-94rck"] Jun 06 09:29:40 crc kubenswrapper[4911]: E0606 09:29:40.814495 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="753820ef-863f-428c-bfc7-5dc548a56eb1" containerName="mariadb-account-create" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.814604 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="753820ef-863f-428c-bfc7-5dc548a56eb1" containerName="mariadb-account-create" Jun 06 09:29:40 crc kubenswrapper[4911]: E0606 09:29:40.814689 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16544562-77d9-4360-856e-5e70c369b7bb" containerName="mariadb-account-create" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.814758 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="16544562-77d9-4360-856e-5e70c369b7bb" containerName="mariadb-account-create" Jun 06 09:29:40 crc kubenswrapper[4911]: E0606 09:29:40.814856 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3ad99d0-bb87-4526-9fb8-7ba427ae893d" containerName="ovn-config" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.814960 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3ad99d0-bb87-4526-9fb8-7ba427ae893d" containerName="ovn-config" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.815199 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3ad99d0-bb87-4526-9fb8-7ba427ae893d" containerName="ovn-config" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.815278 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="753820ef-863f-428c-bfc7-5dc548a56eb1" containerName="mariadb-account-create" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.815352 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="16544562-77d9-4360-856e-5e70c369b7bb" containerName="mariadb-account-create" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.815980 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-8066-account-create-94rck" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.820726 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.826333 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8066-account-create-94rck"] Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.893028 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jc7b\" (UniqueName: \"kubernetes.io/projected/1d268c6a-6cc1-4e52-8893-ed48861ea882-kube-api-access-6jc7b\") pod \"glance-8066-account-create-94rck\" (UID: \"1d268c6a-6cc1-4e52-8893-ed48861ea882\") " pod="openstack/glance-8066-account-create-94rck" Jun 06 09:29:40 crc kubenswrapper[4911]: I0606 09:29:40.995218 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jc7b\" (UniqueName: \"kubernetes.io/projected/1d268c6a-6cc1-4e52-8893-ed48861ea882-kube-api-access-6jc7b\") pod \"glance-8066-account-create-94rck\" (UID: \"1d268c6a-6cc1-4e52-8893-ed48861ea882\") " pod="openstack/glance-8066-account-create-94rck" Jun 06 09:29:41 crc kubenswrapper[4911]: I0606 09:29:41.023471 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jc7b\" (UniqueName: \"kubernetes.io/projected/1d268c6a-6cc1-4e52-8893-ed48861ea882-kube-api-access-6jc7b\") pod \"glance-8066-account-create-94rck\" (UID: \"1d268c6a-6cc1-4e52-8893-ed48861ea882\") " pod="openstack/glance-8066-account-create-94rck" Jun 06 09:29:41 crc kubenswrapper[4911]: I0606 09:29:41.134636 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8066-account-create-94rck" Jun 06 09:29:41 crc kubenswrapper[4911]: I0606 09:29:41.477646 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"1cbbea3df6e7e95b294eba2f873b0e29625e2905a094f71a7b302728628596d4"} Jun 06 09:29:41 crc kubenswrapper[4911]: I0606 09:29:41.478031 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"b5e095b9fe3123be74dc095f1b7270ae10e3fe7f48dabaaf5d590746c5e8d5fb"} Jun 06 09:29:41 crc kubenswrapper[4911]: I0606 09:29:41.478045 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"873aed1f1edabe657672149da601cc886207e0548df25839c7553ec5b0e2957c"} Jun 06 09:29:41 crc kubenswrapper[4911]: I0606 09:29:41.478058 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"b1c842bd79e931de9f49f20c2efd507d848540f17449e3a9eb9b07eea3fd79aa"} Jun 06 09:29:41 crc kubenswrapper[4911]: I0606 09:29:41.478069 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"d5a2a84aaad33f5621fed27115d26b29f6737fbd3556d69af597d070a2b8cc76"} Jun 06 09:29:41 crc kubenswrapper[4911]: I0606 09:29:41.478078 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"7491c047988ffd42beedbbba0cf4cf85892bd3b5f5cfb6bce01b1c950ef9fb98"} Jun 06 09:29:41 crc kubenswrapper[4911]: I0606 09:29:41.619460 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8066-account-create-94rck"] Jun 06 09:29:41 crc kubenswrapper[4911]: W0606 09:29:41.619555 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d268c6a_6cc1_4e52_8893_ed48861ea882.slice/crio-b25e892755a7739a6e5576c4d3ae4c8d29a96298d728e25f606fab02e074c553 WatchSource:0}: Error finding container b25e892755a7739a6e5576c4d3ae4c8d29a96298d728e25f606fab02e074c553: Status 404 returned error can't find the container with id b25e892755a7739a6e5576c4d3ae4c8d29a96298d728e25f606fab02e074c553 Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.486786 4911 generic.go:334] "Generic (PLEG): container finished" podID="1d268c6a-6cc1-4e52-8893-ed48861ea882" containerID="717f10e465cc9993058fdaaf3f0b6b81c900abe00cccb4fc71ad70340b05f9b9" exitCode=0 Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.486871 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8066-account-create-94rck" event={"ID":"1d268c6a-6cc1-4e52-8893-ed48861ea882","Type":"ContainerDied","Data":"717f10e465cc9993058fdaaf3f0b6b81c900abe00cccb4fc71ad70340b05f9b9"} Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.486906 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8066-account-create-94rck" event={"ID":"1d268c6a-6cc1-4e52-8893-ed48861ea882","Type":"ContainerStarted","Data":"b25e892755a7739a6e5576c4d3ae4c8d29a96298d728e25f606fab02e074c553"} Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.494993 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bbc248fe-b133-4e7e-aad5-c29a3c215e6b","Type":"ContainerStarted","Data":"f74d95682596bfc1dd0524dcaba2754d994d3b242c0a884c5de50074203ed550"} Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.536205 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.537472755 podStartE2EDuration="26.536184523s" podCreationTimestamp="2025-06-06 09:29:16 +0000 UTC" firstStartedPulling="2025-06-06 09:29:34.254027679 +0000 UTC m=+985.529453232" lastFinishedPulling="2025-06-06 09:29:40.252739457 +0000 UTC m=+991.528165000" observedRunningTime="2025-06-06 09:29:42.532205771 +0000 UTC m=+993.807631334" watchObservedRunningTime="2025-06-06 09:29:42.536184523 +0000 UTC m=+993.811610066" Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.782712 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-854687b54c-ssgsr"] Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.784463 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.786385 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.805039 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-854687b54c-ssgsr"] Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.921257 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-nb\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.921340 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-config\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.921372 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-svc\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.921392 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-sb\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.921426 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-swift-storage-0\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:42 crc kubenswrapper[4911]: I0606 09:29:42.921494 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfvsk\" (UniqueName: \"kubernetes.io/projected/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-kube-api-access-tfvsk\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.023814 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-nb\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.023923 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-config\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: 
\"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.023961 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-svc\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.023981 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-sb\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.024021 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-swift-storage-0\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.024110 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfvsk\" (UniqueName: \"kubernetes.io/projected/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-kube-api-access-tfvsk\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.025156 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-nb\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.025793 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-swift-storage-0\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.025922 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-svc\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.025941 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-sb\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.026195 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-config\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 
09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.045534 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfvsk\" (UniqueName: \"kubernetes.io/projected/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-kube-api-access-tfvsk\") pod \"dnsmasq-dns-854687b54c-ssgsr\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.102564 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.533363 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-854687b54c-ssgsr"] Jun 06 09:29:43 crc kubenswrapper[4911]: W0606 09:29:43.538764 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeaf33d50_92ab_4ef6_8a0d_9ab0eb177204.slice/crio-0001a3d2af78eeba0716de326153e1a51a3bde847732afc9fec9c6ef6041f4be WatchSource:0}: Error finding container 0001a3d2af78eeba0716de326153e1a51a3bde847732afc9fec9c6ef6041f4be: Status 404 returned error can't find the container with id 0001a3d2af78eeba0716de326153e1a51a3bde847732afc9fec9c6ef6041f4be Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.843328 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8066-account-create-94rck" Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.941697 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jc7b\" (UniqueName: \"kubernetes.io/projected/1d268c6a-6cc1-4e52-8893-ed48861ea882-kube-api-access-6jc7b\") pod \"1d268c6a-6cc1-4e52-8893-ed48861ea882\" (UID: \"1d268c6a-6cc1-4e52-8893-ed48861ea882\") " Jun 06 09:29:43 crc kubenswrapper[4911]: I0606 09:29:43.945936 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d268c6a-6cc1-4e52-8893-ed48861ea882-kube-api-access-6jc7b" (OuterVolumeSpecName: "kube-api-access-6jc7b") pod "1d268c6a-6cc1-4e52-8893-ed48861ea882" (UID: "1d268c6a-6cc1-4e52-8893-ed48861ea882"). InnerVolumeSpecName "kube-api-access-6jc7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:44 crc kubenswrapper[4911]: I0606 09:29:44.043568 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jc7b\" (UniqueName: \"kubernetes.io/projected/1d268c6a-6cc1-4e52-8893-ed48861ea882-kube-api-access-6jc7b\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:44 crc kubenswrapper[4911]: I0606 09:29:44.515020 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-8066-account-create-94rck" Jun 06 09:29:44 crc kubenswrapper[4911]: I0606 09:29:44.515033 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8066-account-create-94rck" event={"ID":"1d268c6a-6cc1-4e52-8893-ed48861ea882","Type":"ContainerDied","Data":"b25e892755a7739a6e5576c4d3ae4c8d29a96298d728e25f606fab02e074c553"} Jun 06 09:29:44 crc kubenswrapper[4911]: I0606 09:29:44.515069 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b25e892755a7739a6e5576c4d3ae4c8d29a96298d728e25f606fab02e074c553" Jun 06 09:29:44 crc kubenswrapper[4911]: I0606 09:29:44.516511 4911 generic.go:334] "Generic (PLEG): container finished" podID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" containerID="42b81dfa67bcddefa77a50445bf796a73e52419759b5f783c2c80358ac85de43" exitCode=0 Jun 06 09:29:44 crc kubenswrapper[4911]: I0606 09:29:44.516544 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" event={"ID":"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204","Type":"ContainerDied","Data":"42b81dfa67bcddefa77a50445bf796a73e52419759b5f783c2c80358ac85de43"} Jun 06 09:29:44 crc kubenswrapper[4911]: I0606 09:29:44.516565 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" event={"ID":"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204","Type":"ContainerStarted","Data":"0001a3d2af78eeba0716de326153e1a51a3bde847732afc9fec9c6ef6041f4be"} Jun 06 09:29:45 crc kubenswrapper[4911]: I0606 09:29:45.527014 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" event={"ID":"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204","Type":"ContainerStarted","Data":"91cef758908ebb576cf6fa2d7de7de17fc59ba392f3e51caccdec8607242595e"} Jun 06 09:29:45 crc kubenswrapper[4911]: I0606 09:29:45.527437 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:45 crc kubenswrapper[4911]: I0606 09:29:45.560601 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" podStartSLOduration=3.560574036 podStartE2EDuration="3.560574036s" podCreationTimestamp="2025-06-06 09:29:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:29:45.552942321 +0000 UTC m=+996.828367874" watchObservedRunningTime="2025-06-06 09:29:45.560574036 +0000 UTC m=+996.835999579" Jun 06 09:29:45 crc kubenswrapper[4911]: I0606 09:29:45.973229 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-9bdfb"] Jun 06 09:29:45 crc kubenswrapper[4911]: E0606 09:29:45.973662 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d268c6a-6cc1-4e52-8893-ed48861ea882" containerName="mariadb-account-create" Jun 06 09:29:45 crc kubenswrapper[4911]: I0606 09:29:45.973688 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d268c6a-6cc1-4e52-8893-ed48861ea882" containerName="mariadb-account-create" Jun 06 09:29:45 crc kubenswrapper[4911]: I0606 09:29:45.976560 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d268c6a-6cc1-4e52-8893-ed48861ea882" containerName="mariadb-account-create" Jun 06 09:29:45 crc kubenswrapper[4911]: I0606 09:29:45.977519 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:45 crc kubenswrapper[4911]: I0606 09:29:45.980147 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-z87sl" Jun 06 09:29:45 crc kubenswrapper[4911]: I0606 09:29:45.980324 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-9bdfb"] Jun 06 09:29:45 crc kubenswrapper[4911]: I0606 09:29:45.981889 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.082811 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67nhg\" (UniqueName: \"kubernetes.io/projected/c0dd5a73-4e05-440f-90e7-f432562f3c3d-kube-api-access-67nhg\") pod \"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.083410 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-config-data\") pod \"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.083526 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-db-sync-config-data\") pod \"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.083594 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-combined-ca-bundle\") pod \"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.185431 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-db-sync-config-data\") pod \"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.185533 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-combined-ca-bundle\") pod \"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.185568 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67nhg\" (UniqueName: \"kubernetes.io/projected/c0dd5a73-4e05-440f-90e7-f432562f3c3d-kube-api-access-67nhg\") pod \"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.185615 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-config-data\") pod 
\"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.191801 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-config-data\") pod \"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.191874 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-combined-ca-bundle\") pod \"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.192374 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-db-sync-config-data\") pod \"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.204820 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67nhg\" (UniqueName: \"kubernetes.io/projected/c0dd5a73-4e05-440f-90e7-f432562f3c3d-kube-api-access-67nhg\") pod \"glance-db-sync-9bdfb\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.295880 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-9bdfb" Jun 06 09:29:46 crc kubenswrapper[4911]: I0606 09:29:46.813554 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-9bdfb"] Jun 06 09:29:46 crc kubenswrapper[4911]: W0606 09:29:46.820801 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0dd5a73_4e05_440f_90e7_f432562f3c3d.slice/crio-58963808f94d84baed02c3dbec9c00bda39e5c321d53de8e49923c6357e41316 WatchSource:0}: Error finding container 58963808f94d84baed02c3dbec9c00bda39e5c321d53de8e49923c6357e41316: Status 404 returned error can't find the container with id 58963808f94d84baed02c3dbec9c00bda39e5c321d53de8e49923c6357e41316 Jun 06 09:29:47 crc kubenswrapper[4911]: I0606 09:29:47.543824 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9bdfb" event={"ID":"c0dd5a73-4e05-440f-90e7-f432562f3c3d","Type":"ContainerStarted","Data":"58963808f94d84baed02c3dbec9c00bda39e5c321d53de8e49923c6357e41316"} Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.349266 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.397659 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.681557 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-xg7d9"] Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.682818 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-xg7d9" Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.746861 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-xg7d9"] Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.791080 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-x87xj"] Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.796877 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45pwv\" (UniqueName: \"kubernetes.io/projected/d7cb25b1-db79-44b5-832b-bdeaf22c1c93-kube-api-access-45pwv\") pod \"cinder-db-create-xg7d9\" (UID: \"d7cb25b1-db79-44b5-832b-bdeaf22c1c93\") " pod="openstack/cinder-db-create-xg7d9" Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.797333 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-x87xj" Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.804664 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-x87xj"] Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.883135 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-qt79z"] Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.884397 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-qt79z" Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.899683 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45pwv\" (UniqueName: \"kubernetes.io/projected/d7cb25b1-db79-44b5-832b-bdeaf22c1c93-kube-api-access-45pwv\") pod \"cinder-db-create-xg7d9\" (UID: \"d7cb25b1-db79-44b5-832b-bdeaf22c1c93\") " pod="openstack/cinder-db-create-xg7d9" Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.899813 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dm29\" (UniqueName: \"kubernetes.io/projected/30ec5b63-8433-471e-8a86-d21652dd0367-kube-api-access-5dm29\") pod \"barbican-db-create-x87xj\" (UID: \"30ec5b63-8433-471e-8a86-d21652dd0367\") " pod="openstack/barbican-db-create-x87xj" Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.924614 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-qt79z"] Jun 06 09:29:52 crc kubenswrapper[4911]: I0606 09:29:52.939794 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45pwv\" (UniqueName: \"kubernetes.io/projected/d7cb25b1-db79-44b5-832b-bdeaf22c1c93-kube-api-access-45pwv\") pod \"cinder-db-create-xg7d9\" (UID: \"d7cb25b1-db79-44b5-832b-bdeaf22c1c93\") " pod="openstack/cinder-db-create-xg7d9" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.001399 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d758\" (UniqueName: \"kubernetes.io/projected/ddbdc914-9643-4c5f-ae96-c519a85f1088-kube-api-access-9d758\") pod \"manila-db-create-qt79z\" (UID: \"ddbdc914-9643-4c5f-ae96-c519a85f1088\") " pod="openstack/manila-db-create-qt79z" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.001700 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dm29\" (UniqueName: \"kubernetes.io/projected/30ec5b63-8433-471e-8a86-d21652dd0367-kube-api-access-5dm29\") pod \"barbican-db-create-x87xj\" (UID: 
\"30ec5b63-8433-471e-8a86-d21652dd0367\") " pod="openstack/barbican-db-create-x87xj" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.004231 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-xg7d9" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.020550 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dm29\" (UniqueName: \"kubernetes.io/projected/30ec5b63-8433-471e-8a86-d21652dd0367-kube-api-access-5dm29\") pod \"barbican-db-create-x87xj\" (UID: \"30ec5b63-8433-471e-8a86-d21652dd0367\") " pod="openstack/barbican-db-create-x87xj" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.102991 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d758\" (UniqueName: \"kubernetes.io/projected/ddbdc914-9643-4c5f-ae96-c519a85f1088-kube-api-access-9d758\") pod \"manila-db-create-qt79z\" (UID: \"ddbdc914-9643-4c5f-ae96-c519a85f1088\") " pod="openstack/manila-db-create-qt79z" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.103779 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.116352 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-x87xj" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.118854 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d758\" (UniqueName: \"kubernetes.io/projected/ddbdc914-9643-4c5f-ae96-c519a85f1088-kube-api-access-9d758\") pod \"manila-db-create-qt79z\" (UID: \"ddbdc914-9643-4c5f-ae96-c519a85f1088\") " pod="openstack/manila-db-create-qt79z" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.161911 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-844f6c47c-tc4ph"] Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.162182 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" podUID="b62868b9-909f-4974-a624-796bfb6a2372" containerName="dnsmasq-dns" containerID="cri-o://845fd4412e94515340e61b45021359a7772ed0a4bf081cf4de63a4150156b123" gracePeriod=10 Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.188086 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-mvtw8"] Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.189334 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mvtw8" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.211368 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-qt79z" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.213662 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mvtw8"] Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.306326 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vq58c\" (UniqueName: \"kubernetes.io/projected/ddb2c431-3ba8-4973-935f-a1922bf220e7-kube-api-access-vq58c\") pod \"neutron-db-create-mvtw8\" (UID: \"ddb2c431-3ba8-4973-935f-a1922bf220e7\") " pod="openstack/neutron-db-create-mvtw8" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.408587 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vq58c\" (UniqueName: \"kubernetes.io/projected/ddb2c431-3ba8-4973-935f-a1922bf220e7-kube-api-access-vq58c\") pod \"neutron-db-create-mvtw8\" (UID: \"ddb2c431-3ba8-4973-935f-a1922bf220e7\") " pod="openstack/neutron-db-create-mvtw8" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.429271 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vq58c\" (UniqueName: \"kubernetes.io/projected/ddb2c431-3ba8-4973-935f-a1922bf220e7-kube-api-access-vq58c\") pod \"neutron-db-create-mvtw8\" (UID: \"ddb2c431-3ba8-4973-935f-a1922bf220e7\") " pod="openstack/neutron-db-create-mvtw8" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.511920 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mvtw8" Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.607408 4911 generic.go:334] "Generic (PLEG): container finished" podID="b62868b9-909f-4974-a624-796bfb6a2372" containerID="845fd4412e94515340e61b45021359a7772ed0a4bf081cf4de63a4150156b123" exitCode=0 Jun 06 09:29:53 crc kubenswrapper[4911]: I0606 09:29:53.607456 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" event={"ID":"b62868b9-909f-4974-a624-796bfb6a2372","Type":"ContainerDied","Data":"845fd4412e94515340e61b45021359a7772ed0a4bf081cf4de63a4150156b123"} Jun 06 09:29:56 crc kubenswrapper[4911]: I0606 09:29:56.487128 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" podUID="b62868b9-909f-4974-a624-796bfb6a2372" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.110:5353: connect: connection refused" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.629341 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.656739 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" event={"ID":"b62868b9-909f-4974-a624-796bfb6a2372","Type":"ContainerDied","Data":"a99255ccfc0269f3b9abfb586ceffb353a9a70a6c9536a0974bd7b439041a43f"} Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.656786 4911 scope.go:117] "RemoveContainer" containerID="845fd4412e94515340e61b45021359a7772ed0a4bf081cf4de63a4150156b123" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.656910 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-844f6c47c-tc4ph" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.705215 4911 scope.go:117] "RemoveContainer" containerID="836f143f536c17110752a9ab56b614d830499e6a733eacfabdbdda0480e59c50" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.784595 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-nb\") pod \"b62868b9-909f-4974-a624-796bfb6a2372\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.784659 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-sb\") pod \"b62868b9-909f-4974-a624-796bfb6a2372\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.784761 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-dns-svc\") pod \"b62868b9-909f-4974-a624-796bfb6a2372\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.784883 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-config\") pod \"b62868b9-909f-4974-a624-796bfb6a2372\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.784915 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffg8c\" (UniqueName: \"kubernetes.io/projected/b62868b9-909f-4974-a624-796bfb6a2372-kube-api-access-ffg8c\") pod \"b62868b9-909f-4974-a624-796bfb6a2372\" (UID: \"b62868b9-909f-4974-a624-796bfb6a2372\") " Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.791692 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b62868b9-909f-4974-a624-796bfb6a2372-kube-api-access-ffg8c" (OuterVolumeSpecName: "kube-api-access-ffg8c") pod "b62868b9-909f-4974-a624-796bfb6a2372" (UID: "b62868b9-909f-4974-a624-796bfb6a2372"). InnerVolumeSpecName "kube-api-access-ffg8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.829442 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b62868b9-909f-4974-a624-796bfb6a2372" (UID: "b62868b9-909f-4974-a624-796bfb6a2372"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.833457 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-config" (OuterVolumeSpecName: "config") pod "b62868b9-909f-4974-a624-796bfb6a2372" (UID: "b62868b9-909f-4974-a624-796bfb6a2372"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.848549 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b62868b9-909f-4974-a624-796bfb6a2372" (UID: "b62868b9-909f-4974-a624-796bfb6a2372"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.854032 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b62868b9-909f-4974-a624-796bfb6a2372" (UID: "b62868b9-909f-4974-a624-796bfb6a2372"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.886694 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.886722 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffg8c\" (UniqueName: \"kubernetes.io/projected/b62868b9-909f-4974-a624-796bfb6a2372-kube-api-access-ffg8c\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.886734 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.886744 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.886753 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b62868b9-909f-4974-a624-796bfb6a2372-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.970047 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-x87xj"] Jun 06 09:29:57 crc kubenswrapper[4911]: I0606 09:29:57.970401 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mvtw8"] Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.004720 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-844f6c47c-tc4ph"] Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.011463 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-844f6c47c-tc4ph"] Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.044768 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-qt79z"] Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.050582 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-xg7d9"] Jun 06 09:29:58 crc kubenswrapper[4911]: W0606 09:29:58.051206 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7cb25b1_db79_44b5_832b_bdeaf22c1c93.slice/crio-80e94dfe540b35f2948618e0b2bf389e053c176c32cb556538462a5fb50d870d WatchSource:0}: Error finding container 
80e94dfe540b35f2948618e0b2bf389e053c176c32cb556538462a5fb50d870d: Status 404 returned error can't find the container with id 80e94dfe540b35f2948618e0b2bf389e053c176c32cb556538462a5fb50d870d Jun 06 09:29:58 crc kubenswrapper[4911]: W0606 09:29:58.058770 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddbdc914_9643_4c5f_ae96_c519a85f1088.slice/crio-7390bab662fc9b3b0f5d28740852d40942b4224eab5452a6e04e3e53857e02fd WatchSource:0}: Error finding container 7390bab662fc9b3b0f5d28740852d40942b4224eab5452a6e04e3e53857e02fd: Status 404 returned error can't find the container with id 7390bab662fc9b3b0f5d28740852d40942b4224eab5452a6e04e3e53857e02fd Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.667528 4911 generic.go:334] "Generic (PLEG): container finished" podID="ddbdc914-9643-4c5f-ae96-c519a85f1088" containerID="856ec54ea2059eebbe914f198d485a2b18fa9ddb8ea24febdd8e7ed08cb80f4a" exitCode=0 Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.667592 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-qt79z" event={"ID":"ddbdc914-9643-4c5f-ae96-c519a85f1088","Type":"ContainerDied","Data":"856ec54ea2059eebbe914f198d485a2b18fa9ddb8ea24febdd8e7ed08cb80f4a"} Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.667903 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-qt79z" event={"ID":"ddbdc914-9643-4c5f-ae96-c519a85f1088","Type":"ContainerStarted","Data":"7390bab662fc9b3b0f5d28740852d40942b4224eab5452a6e04e3e53857e02fd"} Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.671117 4911 generic.go:334] "Generic (PLEG): container finished" podID="ddb2c431-3ba8-4973-935f-a1922bf220e7" containerID="d7142c835952404819d9d029db1b60e1da9014272f881569004cde8c2756f1a1" exitCode=0 Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.671161 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mvtw8" event={"ID":"ddb2c431-3ba8-4973-935f-a1922bf220e7","Type":"ContainerDied","Data":"d7142c835952404819d9d029db1b60e1da9014272f881569004cde8c2756f1a1"} Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.671198 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mvtw8" event={"ID":"ddb2c431-3ba8-4973-935f-a1922bf220e7","Type":"ContainerStarted","Data":"e7eabf89da5b2eb6c786bf3767b8fb1e620778c3475e99267765acf0dd145c5b"} Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.673002 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9bdfb" event={"ID":"c0dd5a73-4e05-440f-90e7-f432562f3c3d","Type":"ContainerStarted","Data":"70ce0764fab7b3d425b2f2fbdcbc903d1ff831e7676cdc42946d25337c34bf72"} Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.677428 4911 generic.go:334] "Generic (PLEG): container finished" podID="30ec5b63-8433-471e-8a86-d21652dd0367" containerID="9b232eb244da3a38b119a4103d84dc2674cbb0148a18d069e7c4581e7289f8f0" exitCode=0 Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.677520 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-x87xj" event={"ID":"30ec5b63-8433-471e-8a86-d21652dd0367","Type":"ContainerDied","Data":"9b232eb244da3a38b119a4103d84dc2674cbb0148a18d069e7c4581e7289f8f0"} Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.677552 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-x87xj" 
event={"ID":"30ec5b63-8433-471e-8a86-d21652dd0367","Type":"ContainerStarted","Data":"1c10c50301bb0f94ef58d469018aab07f32319b47363928e523682b535d50585"} Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.683239 4911 generic.go:334] "Generic (PLEG): container finished" podID="d7cb25b1-db79-44b5-832b-bdeaf22c1c93" containerID="4b03684044688034919468ba18eabe35c026e00740058601862a5b57bded9866" exitCode=0 Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.683304 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-xg7d9" event={"ID":"d7cb25b1-db79-44b5-832b-bdeaf22c1c93","Type":"ContainerDied","Data":"4b03684044688034919468ba18eabe35c026e00740058601862a5b57bded9866"} Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.683337 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-xg7d9" event={"ID":"d7cb25b1-db79-44b5-832b-bdeaf22c1c93","Type":"ContainerStarted","Data":"80e94dfe540b35f2948618e0b2bf389e053c176c32cb556538462a5fb50d870d"} Jun 06 09:29:58 crc kubenswrapper[4911]: I0606 09:29:58.719897 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-9bdfb" podStartSLOduration=3.05449678 podStartE2EDuration="13.719878335s" podCreationTimestamp="2025-06-06 09:29:45 +0000 UTC" firstStartedPulling="2025-06-06 09:29:46.824935444 +0000 UTC m=+998.100360987" lastFinishedPulling="2025-06-06 09:29:57.490316999 +0000 UTC m=+1008.765742542" observedRunningTime="2025-06-06 09:29:58.719615938 +0000 UTC m=+1009.995041501" watchObservedRunningTime="2025-06-06 09:29:58.719878335 +0000 UTC m=+1009.995303878" Jun 06 09:29:59 crc kubenswrapper[4911]: I0606 09:29:59.965120 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b62868b9-909f-4974-a624-796bfb6a2372" path="/var/lib/kubelet/pods/b62868b9-909f-4974-a624-796bfb6a2372/volumes" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.069033 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-xg7d9" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.146713 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn"] Jun 06 09:30:00 crc kubenswrapper[4911]: E0606 09:30:00.152131 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62868b9-909f-4974-a624-796bfb6a2372" containerName="dnsmasq-dns" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.152181 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62868b9-909f-4974-a624-796bfb6a2372" containerName="dnsmasq-dns" Jun 06 09:30:00 crc kubenswrapper[4911]: E0606 09:30:00.152210 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7cb25b1-db79-44b5-832b-bdeaf22c1c93" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.152219 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7cb25b1-db79-44b5-832b-bdeaf22c1c93" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: E0606 09:30:00.152237 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62868b9-909f-4974-a624-796bfb6a2372" containerName="init" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.152246 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62868b9-909f-4974-a624-796bfb6a2372" containerName="init" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.152561 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62868b9-909f-4974-a624-796bfb6a2372" containerName="dnsmasq-dns" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.152594 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7cb25b1-db79-44b5-832b-bdeaf22c1c93" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.153478 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.157484 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.157791 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.161771 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn"] Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.174237 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-x87xj" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.185340 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mvtw8" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.202509 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-qt79z" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.238613 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45pwv\" (UniqueName: \"kubernetes.io/projected/d7cb25b1-db79-44b5-832b-bdeaf22c1c93-kube-api-access-45pwv\") pod \"d7cb25b1-db79-44b5-832b-bdeaf22c1c93\" (UID: \"d7cb25b1-db79-44b5-832b-bdeaf22c1c93\") " Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.239054 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92mn4\" (UniqueName: \"kubernetes.io/projected/5d2767cd-89b4-421b-86f0-c588fbd2dddf-kube-api-access-92mn4\") pod \"collect-profiles-29153370-68mhn\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.239160 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5d2767cd-89b4-421b-86f0-c588fbd2dddf-secret-volume\") pod \"collect-profiles-29153370-68mhn\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.239204 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d2767cd-89b4-421b-86f0-c588fbd2dddf-config-volume\") pod \"collect-profiles-29153370-68mhn\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.245377 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7cb25b1-db79-44b5-832b-bdeaf22c1c93-kube-api-access-45pwv" (OuterVolumeSpecName: "kube-api-access-45pwv") pod "d7cb25b1-db79-44b5-832b-bdeaf22c1c93" (UID: "d7cb25b1-db79-44b5-832b-bdeaf22c1c93"). InnerVolumeSpecName "kube-api-access-45pwv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.340888 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vq58c\" (UniqueName: \"kubernetes.io/projected/ddb2c431-3ba8-4973-935f-a1922bf220e7-kube-api-access-vq58c\") pod \"ddb2c431-3ba8-4973-935f-a1922bf220e7\" (UID: \"ddb2c431-3ba8-4973-935f-a1922bf220e7\") " Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.341472 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9d758\" (UniqueName: \"kubernetes.io/projected/ddbdc914-9643-4c5f-ae96-c519a85f1088-kube-api-access-9d758\") pod \"ddbdc914-9643-4c5f-ae96-c519a85f1088\" (UID: \"ddbdc914-9643-4c5f-ae96-c519a85f1088\") " Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.341527 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dm29\" (UniqueName: \"kubernetes.io/projected/30ec5b63-8433-471e-8a86-d21652dd0367-kube-api-access-5dm29\") pod \"30ec5b63-8433-471e-8a86-d21652dd0367\" (UID: \"30ec5b63-8433-471e-8a86-d21652dd0367\") " Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.341953 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92mn4\" (UniqueName: \"kubernetes.io/projected/5d2767cd-89b4-421b-86f0-c588fbd2dddf-kube-api-access-92mn4\") pod \"collect-profiles-29153370-68mhn\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.342046 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5d2767cd-89b4-421b-86f0-c588fbd2dddf-secret-volume\") pod \"collect-profiles-29153370-68mhn\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.342083 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d2767cd-89b4-421b-86f0-c588fbd2dddf-config-volume\") pod \"collect-profiles-29153370-68mhn\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.342267 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45pwv\" (UniqueName: \"kubernetes.io/projected/d7cb25b1-db79-44b5-832b-bdeaf22c1c93-kube-api-access-45pwv\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.343066 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d2767cd-89b4-421b-86f0-c588fbd2dddf-config-volume\") pod \"collect-profiles-29153370-68mhn\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.345536 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddb2c431-3ba8-4973-935f-a1922bf220e7-kube-api-access-vq58c" (OuterVolumeSpecName: "kube-api-access-vq58c") pod "ddb2c431-3ba8-4973-935f-a1922bf220e7" (UID: "ddb2c431-3ba8-4973-935f-a1922bf220e7"). InnerVolumeSpecName "kube-api-access-vq58c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.345916 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30ec5b63-8433-471e-8a86-d21652dd0367-kube-api-access-5dm29" (OuterVolumeSpecName: "kube-api-access-5dm29") pod "30ec5b63-8433-471e-8a86-d21652dd0367" (UID: "30ec5b63-8433-471e-8a86-d21652dd0367"). InnerVolumeSpecName "kube-api-access-5dm29". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.346087 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddbdc914-9643-4c5f-ae96-c519a85f1088-kube-api-access-9d758" (OuterVolumeSpecName: "kube-api-access-9d758") pod "ddbdc914-9643-4c5f-ae96-c519a85f1088" (UID: "ddbdc914-9643-4c5f-ae96-c519a85f1088"). InnerVolumeSpecName "kube-api-access-9d758". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.347924 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5d2767cd-89b4-421b-86f0-c588fbd2dddf-secret-volume\") pod \"collect-profiles-29153370-68mhn\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.361506 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92mn4\" (UniqueName: \"kubernetes.io/projected/5d2767cd-89b4-421b-86f0-c588fbd2dddf-kube-api-access-92mn4\") pod \"collect-profiles-29153370-68mhn\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.443845 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vq58c\" (UniqueName: \"kubernetes.io/projected/ddb2c431-3ba8-4973-935f-a1922bf220e7-kube-api-access-vq58c\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.443903 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9d758\" (UniqueName: \"kubernetes.io/projected/ddbdc914-9643-4c5f-ae96-c519a85f1088-kube-api-access-9d758\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.443917 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dm29\" (UniqueName: \"kubernetes.io/projected/30ec5b63-8433-471e-8a86-d21652dd0367-kube-api-access-5dm29\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.526741 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.704345 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-scb57"] Jun 06 09:30:00 crc kubenswrapper[4911]: E0606 09:30:00.704753 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30ec5b63-8433-471e-8a86-d21652dd0367" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.704776 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ec5b63-8433-471e-8a86-d21652dd0367" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: E0606 09:30:00.704813 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddb2c431-3ba8-4973-935f-a1922bf220e7" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.704822 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddb2c431-3ba8-4973-935f-a1922bf220e7" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: E0606 09:30:00.704839 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddbdc914-9643-4c5f-ae96-c519a85f1088" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.704848 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddbdc914-9643-4c5f-ae96-c519a85f1088" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.705035 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddb2c431-3ba8-4973-935f-a1922bf220e7" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.705118 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="30ec5b63-8433-471e-8a86-d21652dd0367" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.705146 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddbdc914-9643-4c5f-ae96-c519a85f1088" containerName="mariadb-database-create" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.705831 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.709811 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s5n65" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.711363 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.711668 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.725277 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.727574 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-scb57"] Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.736353 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-qt79z" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.736434 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-qt79z" event={"ID":"ddbdc914-9643-4c5f-ae96-c519a85f1088","Type":"ContainerDied","Data":"7390bab662fc9b3b0f5d28740852d40942b4224eab5452a6e04e3e53857e02fd"} Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.736483 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7390bab662fc9b3b0f5d28740852d40942b4224eab5452a6e04e3e53857e02fd" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.739127 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mvtw8" event={"ID":"ddb2c431-3ba8-4973-935f-a1922bf220e7","Type":"ContainerDied","Data":"e7eabf89da5b2eb6c786bf3767b8fb1e620778c3475e99267765acf0dd145c5b"} Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.739170 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7eabf89da5b2eb6c786bf3767b8fb1e620778c3475e99267765acf0dd145c5b" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.739221 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mvtw8" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.741182 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-x87xj" event={"ID":"30ec5b63-8433-471e-8a86-d21652dd0367","Type":"ContainerDied","Data":"1c10c50301bb0f94ef58d469018aab07f32319b47363928e523682b535d50585"} Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.741217 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c10c50301bb0f94ef58d469018aab07f32319b47363928e523682b535d50585" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.741270 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-x87xj" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.742713 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-xg7d9" event={"ID":"d7cb25b1-db79-44b5-832b-bdeaf22c1c93","Type":"ContainerDied","Data":"80e94dfe540b35f2948618e0b2bf389e053c176c32cb556538462a5fb50d870d"} Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.742739 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80e94dfe540b35f2948618e0b2bf389e053c176c32cb556538462a5fb50d870d" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.742758 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-xg7d9" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.853514 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-config-data\") pod \"keystone-db-sync-scb57\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.853798 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lp78\" (UniqueName: \"kubernetes.io/projected/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-kube-api-access-6lp78\") pod \"keystone-db-sync-scb57\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.854053 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-combined-ca-bundle\") pod \"keystone-db-sync-scb57\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.955699 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-combined-ca-bundle\") pod \"keystone-db-sync-scb57\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.955789 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-config-data\") pod \"keystone-db-sync-scb57\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.955892 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lp78\" (UniqueName: \"kubernetes.io/projected/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-kube-api-access-6lp78\") pod \"keystone-db-sync-scb57\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.959913 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-combined-ca-bundle\") pod \"keystone-db-sync-scb57\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.960311 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-config-data\") pod \"keystone-db-sync-scb57\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:00 crc kubenswrapper[4911]: I0606 09:30:00.980134 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lp78\" (UniqueName: \"kubernetes.io/projected/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-kube-api-access-6lp78\") pod \"keystone-db-sync-scb57\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:01 crc 
kubenswrapper[4911]: I0606 09:30:01.009873 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn"] Jun 06 09:30:01 crc kubenswrapper[4911]: W0606 09:30:01.015871 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d2767cd_89b4_421b_86f0_c588fbd2dddf.slice/crio-5197fc369815f4b130ee12ec8d7049cc00fd02bcb7f29d901cc134f810391d63 WatchSource:0}: Error finding container 5197fc369815f4b130ee12ec8d7049cc00fd02bcb7f29d901cc134f810391d63: Status 404 returned error can't find the container with id 5197fc369815f4b130ee12ec8d7049cc00fd02bcb7f29d901cc134f810391d63 Jun 06 09:30:01 crc kubenswrapper[4911]: I0606 09:30:01.029405 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:01 crc kubenswrapper[4911]: I0606 09:30:01.510037 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-scb57"] Jun 06 09:30:01 crc kubenswrapper[4911]: I0606 09:30:01.751456 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-scb57" event={"ID":"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273","Type":"ContainerStarted","Data":"15017c4892a166892969938310785739d602beec17f2f5c8913ccf3500c75622"} Jun 06 09:30:01 crc kubenswrapper[4911]: I0606 09:30:01.753506 4911 generic.go:334] "Generic (PLEG): container finished" podID="5d2767cd-89b4-421b-86f0-c588fbd2dddf" containerID="e24afeff7b93f095d0b78ee9f6d835dad4a181acd727cc0fc9e0e56ddea9b8ee" exitCode=0 Jun 06 09:30:01 crc kubenswrapper[4911]: I0606 09:30:01.753539 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" event={"ID":"5d2767cd-89b4-421b-86f0-c588fbd2dddf","Type":"ContainerDied","Data":"e24afeff7b93f095d0b78ee9f6d835dad4a181acd727cc0fc9e0e56ddea9b8ee"} Jun 06 09:30:01 crc kubenswrapper[4911]: I0606 09:30:01.753573 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" event={"ID":"5d2767cd-89b4-421b-86f0-c588fbd2dddf","Type":"ContainerStarted","Data":"5197fc369815f4b130ee12ec8d7049cc00fd02bcb7f29d901cc134f810391d63"} Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.082401 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-pgk2s"] Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.083921 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-pgk2s" Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.182948 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfhfh\" (UniqueName: \"kubernetes.io/projected/c8aebcd6-87e3-492d-8fcc-72ea894778a1-kube-api-access-vfhfh\") pod \"crc-debug-pgk2s\" (UID: \"c8aebcd6-87e3-492d-8fcc-72ea894778a1\") " pod="openstack/crc-debug-pgk2s" Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.183536 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c8aebcd6-87e3-492d-8fcc-72ea894778a1-host\") pod \"crc-debug-pgk2s\" (UID: \"c8aebcd6-87e3-492d-8fcc-72ea894778a1\") " pod="openstack/crc-debug-pgk2s" Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.285371 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfhfh\" (UniqueName: \"kubernetes.io/projected/c8aebcd6-87e3-492d-8fcc-72ea894778a1-kube-api-access-vfhfh\") pod \"crc-debug-pgk2s\" (UID: \"c8aebcd6-87e3-492d-8fcc-72ea894778a1\") " pod="openstack/crc-debug-pgk2s" Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.285470 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c8aebcd6-87e3-492d-8fcc-72ea894778a1-host\") pod \"crc-debug-pgk2s\" (UID: \"c8aebcd6-87e3-492d-8fcc-72ea894778a1\") " pod="openstack/crc-debug-pgk2s" Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.285640 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c8aebcd6-87e3-492d-8fcc-72ea894778a1-host\") pod \"crc-debug-pgk2s\" (UID: \"c8aebcd6-87e3-492d-8fcc-72ea894778a1\") " pod="openstack/crc-debug-pgk2s" Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.307438 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfhfh\" (UniqueName: \"kubernetes.io/projected/c8aebcd6-87e3-492d-8fcc-72ea894778a1-kube-api-access-vfhfh\") pod \"crc-debug-pgk2s\" (UID: \"c8aebcd6-87e3-492d-8fcc-72ea894778a1\") " pod="openstack/crc-debug-pgk2s" Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.405524 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-pgk2s" Jun 06 09:30:02 crc kubenswrapper[4911]: W0606 09:30:02.455439 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8aebcd6_87e3_492d_8fcc_72ea894778a1.slice/crio-8203bc4dbc147705f7911e048e47b7d64930555c8c284efde2bcd91111573360 WatchSource:0}: Error finding container 8203bc4dbc147705f7911e048e47b7d64930555c8c284efde2bcd91111573360: Status 404 returned error can't find the container with id 8203bc4dbc147705f7911e048e47b7d64930555c8c284efde2bcd91111573360 Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.763767 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-pgk2s" event={"ID":"c8aebcd6-87e3-492d-8fcc-72ea894778a1","Type":"ContainerStarted","Data":"51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575"} Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.764229 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-pgk2s" event={"ID":"c8aebcd6-87e3-492d-8fcc-72ea894778a1","Type":"ContainerStarted","Data":"8203bc4dbc147705f7911e048e47b7d64930555c8c284efde2bcd91111573360"} Jun 06 09:30:02 crc kubenswrapper[4911]: I0606 09:30:02.788324 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-pgk2s" podStartSLOduration=0.788300323 podStartE2EDuration="788.300323ms" podCreationTimestamp="2025-06-06 09:30:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:02.778157723 +0000 UTC m=+1014.053583286" watchObservedRunningTime="2025-06-06 09:30:02.788300323 +0000 UTC m=+1014.063725866" Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.046338 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.201657 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5d2767cd-89b4-421b-86f0-c588fbd2dddf-secret-volume\") pod \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.201774 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d2767cd-89b4-421b-86f0-c588fbd2dddf-config-volume\") pod \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.201927 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92mn4\" (UniqueName: \"kubernetes.io/projected/5d2767cd-89b4-421b-86f0-c588fbd2dddf-kube-api-access-92mn4\") pod \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\" (UID: \"5d2767cd-89b4-421b-86f0-c588fbd2dddf\") " Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.202923 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d2767cd-89b4-421b-86f0-c588fbd2dddf-config-volume" (OuterVolumeSpecName: "config-volume") pod "5d2767cd-89b4-421b-86f0-c588fbd2dddf" (UID: "5d2767cd-89b4-421b-86f0-c588fbd2dddf"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.209994 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d2767cd-89b4-421b-86f0-c588fbd2dddf-kube-api-access-92mn4" (OuterVolumeSpecName: "kube-api-access-92mn4") pod "5d2767cd-89b4-421b-86f0-c588fbd2dddf" (UID: "5d2767cd-89b4-421b-86f0-c588fbd2dddf"). InnerVolumeSpecName "kube-api-access-92mn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.212301 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d2767cd-89b4-421b-86f0-c588fbd2dddf-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5d2767cd-89b4-421b-86f0-c588fbd2dddf" (UID: "5d2767cd-89b4-421b-86f0-c588fbd2dddf"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.303598 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5d2767cd-89b4-421b-86f0-c588fbd2dddf-config-volume\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.303645 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92mn4\" (UniqueName: \"kubernetes.io/projected/5d2767cd-89b4-421b-86f0-c588fbd2dddf-kube-api-access-92mn4\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.303659 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5d2767cd-89b4-421b-86f0-c588fbd2dddf-secret-volume\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.774285 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" event={"ID":"5d2767cd-89b4-421b-86f0-c588fbd2dddf","Type":"ContainerDied","Data":"5197fc369815f4b130ee12ec8d7049cc00fd02bcb7f29d901cc134f810391d63"} Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.774581 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5197fc369815f4b130ee12ec8d7049cc00fd02bcb7f29d901cc134f810391d63" Jun 06 09:30:03 crc kubenswrapper[4911]: I0606 09:30:03.774417 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn" Jun 06 09:30:05 crc kubenswrapper[4911]: I0606 09:30:05.793192 4911 generic.go:334] "Generic (PLEG): container finished" podID="c0dd5a73-4e05-440f-90e7-f432562f3c3d" containerID="70ce0764fab7b3d425b2f2fbdcbc903d1ff831e7676cdc42946d25337c34bf72" exitCode=0 Jun 06 09:30:05 crc kubenswrapper[4911]: I0606 09:30:05.793285 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9bdfb" event={"ID":"c0dd5a73-4e05-440f-90e7-f432562f3c3d","Type":"ContainerDied","Data":"70ce0764fab7b3d425b2f2fbdcbc903d1ff831e7676cdc42946d25337c34bf72"} Jun 06 09:30:05 crc kubenswrapper[4911]: I0606 09:30:05.796886 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-scb57" event={"ID":"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273","Type":"ContainerStarted","Data":"1ae8b3b8370df73982d9226b37541aeeb89e407c696e939229bd306f6ae69751"} Jun 06 09:30:05 crc kubenswrapper[4911]: I0606 09:30:05.829797 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-scb57" podStartSLOduration=1.847156178 podStartE2EDuration="5.829779909s" podCreationTimestamp="2025-06-06 09:30:00 +0000 UTC" firstStartedPulling="2025-06-06 09:30:01.569762819 +0000 UTC m=+1012.845188362" lastFinishedPulling="2025-06-06 09:30:05.55238655 +0000 UTC m=+1016.827812093" observedRunningTime="2025-06-06 09:30:05.824653868 +0000 UTC m=+1017.100079421" watchObservedRunningTime="2025-06-06 09:30:05.829779909 +0000 UTC m=+1017.105205452" Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.199963 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-9bdfb" Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.382080 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-combined-ca-bundle\") pod \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.382211 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-config-data\") pod \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.382235 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-db-sync-config-data\") pod \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.382308 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67nhg\" (UniqueName: \"kubernetes.io/projected/c0dd5a73-4e05-440f-90e7-f432562f3c3d-kube-api-access-67nhg\") pod \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\" (UID: \"c0dd5a73-4e05-440f-90e7-f432562f3c3d\") " Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.388565 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0dd5a73-4e05-440f-90e7-f432562f3c3d-kube-api-access-67nhg" (OuterVolumeSpecName: "kube-api-access-67nhg") pod "c0dd5a73-4e05-440f-90e7-f432562f3c3d" (UID: 
"c0dd5a73-4e05-440f-90e7-f432562f3c3d"). InnerVolumeSpecName "kube-api-access-67nhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.389273 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c0dd5a73-4e05-440f-90e7-f432562f3c3d" (UID: "c0dd5a73-4e05-440f-90e7-f432562f3c3d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.407800 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0dd5a73-4e05-440f-90e7-f432562f3c3d" (UID: "c0dd5a73-4e05-440f-90e7-f432562f3c3d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.427893 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-config-data" (OuterVolumeSpecName: "config-data") pod "c0dd5a73-4e05-440f-90e7-f432562f3c3d" (UID: "c0dd5a73-4e05-440f-90e7-f432562f3c3d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.483867 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.483908 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.483926 4911 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c0dd5a73-4e05-440f-90e7-f432562f3c3d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.483938 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67nhg\" (UniqueName: \"kubernetes.io/projected/c0dd5a73-4e05-440f-90e7-f432562f3c3d-kube-api-access-67nhg\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.811331 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9bdfb" event={"ID":"c0dd5a73-4e05-440f-90e7-f432562f3c3d","Type":"ContainerDied","Data":"58963808f94d84baed02c3dbec9c00bda39e5c321d53de8e49923c6357e41316"} Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.811368 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58963808f94d84baed02c3dbec9c00bda39e5c321d53de8e49923c6357e41316" Jun 06 09:30:07 crc kubenswrapper[4911]: I0606 09:30:07.811391 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-9bdfb" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.171738 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d99f9bd59-ss24w"] Jun 06 09:30:08 crc kubenswrapper[4911]: E0606 09:30:08.172452 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d2767cd-89b4-421b-86f0-c588fbd2dddf" containerName="collect-profiles" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.172479 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d2767cd-89b4-421b-86f0-c588fbd2dddf" containerName="collect-profiles" Jun 06 09:30:08 crc kubenswrapper[4911]: E0606 09:30:08.172590 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0dd5a73-4e05-440f-90e7-f432562f3c3d" containerName="glance-db-sync" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.172616 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0dd5a73-4e05-440f-90e7-f432562f3c3d" containerName="glance-db-sync" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.173583 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d2767cd-89b4-421b-86f0-c588fbd2dddf" containerName="collect-profiles" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.173671 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0dd5a73-4e05-440f-90e7-f432562f3c3d" containerName="glance-db-sync" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.175043 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.202289 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d99f9bd59-ss24w"] Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.298156 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22xr8\" (UniqueName: \"kubernetes.io/projected/1806d366-b45d-449a-8674-d9c2ff2e6740-kube-api-access-22xr8\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.298230 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-swift-storage-0\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.298274 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-sb\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.298291 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-config\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.298349 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-svc\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.298382 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-nb\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.399935 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-config\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.399997 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-svc\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.400043 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-nb\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.400141 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22xr8\" (UniqueName: \"kubernetes.io/projected/1806d366-b45d-449a-8674-d9c2ff2e6740-kube-api-access-22xr8\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.400185 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-swift-storage-0\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.400237 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-sb\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.401024 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-sb\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.401620 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-config\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.402512 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-nb\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.402584 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-swift-storage-0\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.403399 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-svc\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.442710 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22xr8\" (UniqueName: \"kubernetes.io/projected/1806d366-b45d-449a-8674-d9c2ff2e6740-kube-api-access-22xr8\") pod \"dnsmasq-dns-5d99f9bd59-ss24w\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.507061 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:08 crc kubenswrapper[4911]: I0606 09:30:08.977502 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d99f9bd59-ss24w"] Jun 06 09:30:09 crc kubenswrapper[4911]: I0606 09:30:09.830781 4911 generic.go:334] "Generic (PLEG): container finished" podID="2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273" containerID="1ae8b3b8370df73982d9226b37541aeeb89e407c696e939229bd306f6ae69751" exitCode=0 Jun 06 09:30:09 crc kubenswrapper[4911]: I0606 09:30:09.831136 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-scb57" event={"ID":"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273","Type":"ContainerDied","Data":"1ae8b3b8370df73982d9226b37541aeeb89e407c696e939229bd306f6ae69751"} Jun 06 09:30:09 crc kubenswrapper[4911]: I0606 09:30:09.832811 4911 generic.go:334] "Generic (PLEG): container finished" podID="1806d366-b45d-449a-8674-d9c2ff2e6740" containerID="6cddcd3bb54adc45e968fe866c7e6129d492d594d69a7a13738855bf1bd2f953" exitCode=0 Jun 06 09:30:09 crc kubenswrapper[4911]: I0606 09:30:09.832831 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" event={"ID":"1806d366-b45d-449a-8674-d9c2ff2e6740","Type":"ContainerDied","Data":"6cddcd3bb54adc45e968fe866c7e6129d492d594d69a7a13738855bf1bd2f953"} Jun 06 09:30:09 crc kubenswrapper[4911]: I0606 09:30:09.832846 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" event={"ID":"1806d366-b45d-449a-8674-d9c2ff2e6740","Type":"ContainerStarted","Data":"a886f8dba5976ac6fcc2a948631728d30f3262e7f6a10fa6b7dc6ae1f042bae4"} Jun 06 09:30:10 crc kubenswrapper[4911]: I0606 09:30:10.844166 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" event={"ID":"1806d366-b45d-449a-8674-d9c2ff2e6740","Type":"ContainerStarted","Data":"10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565"} Jun 06 09:30:10 crc kubenswrapper[4911]: I0606 09:30:10.844475 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:10 crc kubenswrapper[4911]: I0606 09:30:10.875209 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" podStartSLOduration=2.87518666 podStartE2EDuration="2.87518666s" podCreationTimestamp="2025-06-06 09:30:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:10.866757465 +0000 UTC m=+1022.142183008" watchObservedRunningTime="2025-06-06 09:30:10.87518666 +0000 UTC m=+1022.150612203" Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.146429 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.246077 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-combined-ca-bundle\") pod \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.246173 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-config-data\") pod \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.246234 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lp78\" (UniqueName: \"kubernetes.io/projected/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-kube-api-access-6lp78\") pod \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\" (UID: \"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273\") " Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.251157 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-kube-api-access-6lp78" (OuterVolumeSpecName: "kube-api-access-6lp78") pod "2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273" (UID: "2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273"). InnerVolumeSpecName "kube-api-access-6lp78". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.276838 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273" (UID: "2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.294747 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-config-data" (OuterVolumeSpecName: "config-data") pod "2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273" (UID: "2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.348945 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.348987 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.348999 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lp78\" (UniqueName: \"kubernetes.io/projected/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273-kube-api-access-6lp78\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.852566 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-scb57" Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.852571 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-scb57" event={"ID":"2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273","Type":"ContainerDied","Data":"15017c4892a166892969938310785739d602beec17f2f5c8913ccf3500c75622"} Jun 06 09:30:11 crc kubenswrapper[4911]: I0606 09:30:11.853775 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15017c4892a166892969938310785739d602beec17f2f5c8913ccf3500c75622" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.109800 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d99f9bd59-ss24w"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.146625 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f46b79579-8wnpv"] Jun 06 09:30:12 crc kubenswrapper[4911]: E0606 09:30:12.147260 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273" containerName="keystone-db-sync" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.147289 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273" containerName="keystone-db-sync" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.147543 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273" containerName="keystone-db-sync" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.148751 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.162125 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-zb42f"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.163682 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.165866 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.166064 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s5n65" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.166485 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.166818 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.176050 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f46b79579-8wnpv"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.195715 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-zb42f"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.264888 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-sb\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.264942 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-scripts\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.264980 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-config\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.265025 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-swift-storage-0\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.265085 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-nb\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.265122 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt56s\" (UniqueName: \"kubernetes.io/projected/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-kube-api-access-jt56s\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.265201 4911 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-config-data\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.265245 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57424\" (UniqueName: \"kubernetes.io/projected/4a1fccf4-006c-45bd-852d-775a4647b177-kube-api-access-57424\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.265265 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-credential-keys\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.265282 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-svc\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.265313 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-fernet-keys\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.265329 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-combined-ca-bundle\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.317331 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.319695 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.321761 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.322017 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.336007 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.366713 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-config\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.366770 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-run-httpd\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.366800 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.366846 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-swift-storage-0\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.366875 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-nb\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.366904 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt56s\" (UniqueName: \"kubernetes.io/projected/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-kube-api-access-jt56s\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.366948 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-config-data\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.366997 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57424\" (UniqueName: \"kubernetes.io/projected/4a1fccf4-006c-45bd-852d-775a4647b177-kube-api-access-57424\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: 
\"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.367023 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-credential-keys\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.367045 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-svc\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.367069 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.367140 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-log-httpd\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.367173 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-fernet-keys\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.367195 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-combined-ca-bundle\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.367222 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-sb\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.367243 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s58v5\" (UniqueName: \"kubernetes.io/projected/a22e427e-b12f-479a-bb38-e83fa482a724-kube-api-access-s58v5\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.367269 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-scripts\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.367293 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-scripts\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.367320 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-config-data\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.369469 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-config\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.369860 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-svc\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.370929 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-nb\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.371547 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-swift-storage-0\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.371705 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-sb\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.374282 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-scripts\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.374696 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-combined-ca-bundle\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.375844 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-fernet-keys\") pod \"keystone-bootstrap-zb42f\" 
(UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.376558 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-credential-keys\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.383168 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-config-data\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.408528 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57424\" (UniqueName: \"kubernetes.io/projected/4a1fccf4-006c-45bd-852d-775a4647b177-kube-api-access-57424\") pod \"dnsmasq-dns-5f46b79579-8wnpv\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.426797 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt56s\" (UniqueName: \"kubernetes.io/projected/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-kube-api-access-jt56s\") pod \"keystone-bootstrap-zb42f\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.469307 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-log-httpd\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.469380 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s58v5\" (UniqueName: \"kubernetes.io/projected/a22e427e-b12f-479a-bb38-e83fa482a724-kube-api-access-s58v5\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.469411 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-scripts\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.469440 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-config-data\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.469487 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-run-httpd\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.469511 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.469611 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.469908 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-log-httpd\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.470114 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-run-httpd\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.477532 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-scripts\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.477586 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.478363 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-config-data\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.478920 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.488026 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.493621 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.496888 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s58v5\" (UniqueName: \"kubernetes.io/projected/a22e427e-b12f-479a-bb38-e83fa482a724-kube-api-access-s58v5\") pod \"ceilometer-0\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.535631 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f46b79579-8wnpv"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.546154 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7854d9c8ff-575q2"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.553758 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.587175 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-grtxs"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.589402 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.600627 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.600875 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.601085 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-qvqnr" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.616056 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7854d9c8ff-575q2"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.630447 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-grtxs"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.666416 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.673320 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-logs\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.673366 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-sb\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.673396 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8tfc\" (UniqueName: \"kubernetes.io/projected/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-kube-api-access-x8tfc\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.673419 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-scripts\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.673449 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-config-data\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.673477 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-config\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.673506 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-combined-ca-bundle\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.673549 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-swift-storage-0\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.673611 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-nb\") pod 
\"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.673669 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-svc\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.673696 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sx4m\" (UniqueName: \"kubernetes.io/projected/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-kube-api-access-2sx4m\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.715137 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-0587-account-create-5ljgg"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.716867 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0587-account-create-5ljgg" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.720989 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.723917 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0587-account-create-5ljgg"] Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.776364 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-svc\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.776704 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sx4m\" (UniqueName: \"kubernetes.io/projected/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-kube-api-access-2sx4m\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.776749 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-logs\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.776771 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-sb\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.776788 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8tfc\" (UniqueName: \"kubernetes.io/projected/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-kube-api-access-x8tfc\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc 
kubenswrapper[4911]: I0606 09:30:12.776810 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-scripts\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.776839 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dwx5\" (UniqueName: \"kubernetes.io/projected/91e36afb-76a0-41d2-85d2-716055b255a6-kube-api-access-8dwx5\") pod \"cinder-0587-account-create-5ljgg\" (UID: \"91e36afb-76a0-41d2-85d2-716055b255a6\") " pod="openstack/cinder-0587-account-create-5ljgg" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.776866 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-config-data\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.776892 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-config\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.776914 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-combined-ca-bundle\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.776963 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-swift-storage-0\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.777086 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-nb\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.778329 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-nb\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.778875 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-logs\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.779202 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-config\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.779318 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-sb\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.780251 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-svc\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.780402 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-swift-storage-0\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.786136 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-scripts\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.789611 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-combined-ca-bundle\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.803174 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-config-data\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.804636 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sx4m\" (UniqueName: \"kubernetes.io/projected/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-kube-api-access-2sx4m\") pod \"placement-db-sync-grtxs\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:12 crc kubenswrapper[4911]: I0606 09:30:12.805250 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8tfc\" (UniqueName: \"kubernetes.io/projected/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-kube-api-access-x8tfc\") pod \"dnsmasq-dns-7854d9c8ff-575q2\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:12.878507 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dwx5\" (UniqueName: \"kubernetes.io/projected/91e36afb-76a0-41d2-85d2-716055b255a6-kube-api-access-8dwx5\") pod \"cinder-0587-account-create-5ljgg\" 
(UID: \"91e36afb-76a0-41d2-85d2-716055b255a6\") " pod="openstack/cinder-0587-account-create-5ljgg" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:12.904287 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dwx5\" (UniqueName: \"kubernetes.io/projected/91e36afb-76a0-41d2-85d2-716055b255a6-kube-api-access-8dwx5\") pod \"cinder-0587-account-create-5ljgg\" (UID: \"91e36afb-76a0-41d2-85d2-716055b255a6\") " pod="openstack/cinder-0587-account-create-5ljgg" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:12.907147 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:12.915226 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-11ea-account-create-k549j"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:12.917814 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-11ea-account-create-k549j" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:12.918891 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:12.924078 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:12.927414 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-11ea-account-create-k549j"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:12.982696 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-457bm\" (UniqueName: \"kubernetes.io/projected/368d0127-18f0-4227-b93f-67c3df9d17af-kube-api-access-457bm\") pod \"barbican-11ea-account-create-k549j\" (UID: \"368d0127-18f0-4227-b93f-67c3df9d17af\") " pod="openstack/barbican-11ea-account-create-k549j" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.022912 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-0561-account-create-6k25c"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.024337 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-0561-account-create-6k25c" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.027374 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.030704 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-0561-account-create-6k25c"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.039944 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-0587-account-create-5ljgg" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.088010 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-457bm\" (UniqueName: \"kubernetes.io/projected/368d0127-18f0-4227-b93f-67c3df9d17af-kube-api-access-457bm\") pod \"barbican-11ea-account-create-k549j\" (UID: \"368d0127-18f0-4227-b93f-67c3df9d17af\") " pod="openstack/barbican-11ea-account-create-k549j" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.089677 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdvtm\" (UniqueName: \"kubernetes.io/projected/9dd0a8e8-2790-448d-85ed-753fe1389bab-kube-api-access-vdvtm\") pod \"manila-0561-account-create-6k25c\" (UID: \"9dd0a8e8-2790-448d-85ed-753fe1389bab\") " pod="openstack/manila-0561-account-create-6k25c" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.120241 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-pgk2s"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.120473 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-pgk2s" podUID="c8aebcd6-87e3-492d-8fcc-72ea894778a1" containerName="container-00" containerID="cri-o://51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575" gracePeriod=2 Jun 06 09:30:13 crc kubenswrapper[4911]: W0606 09:30:13.120751 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a1fccf4_006c_45bd_852d_775a4647b177.slice/crio-e5e726cacffe1748a16053bd47efb9c1ff2a67e4d5cb3256256d3866af0ece0f WatchSource:0}: Error finding container e5e726cacffe1748a16053bd47efb9c1ff2a67e4d5cb3256256d3866af0ece0f: Status 404 returned error can't find the container with id e5e726cacffe1748a16053bd47efb9c1ff2a67e4d5cb3256256d3866af0ece0f Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.128843 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-pgk2s"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.130001 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-457bm\" (UniqueName: \"kubernetes.io/projected/368d0127-18f0-4227-b93f-67c3df9d17af-kube-api-access-457bm\") pod \"barbican-11ea-account-create-k549j\" (UID: \"368d0127-18f0-4227-b93f-67c3df9d17af\") " pod="openstack/barbican-11ea-account-create-k549j" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.137281 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f46b79579-8wnpv"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.145513 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.157460 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-pgk2s" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.189823 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-zb42f"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.190660 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfhfh\" (UniqueName: \"kubernetes.io/projected/c8aebcd6-87e3-492d-8fcc-72ea894778a1-kube-api-access-vfhfh\") pod \"c8aebcd6-87e3-492d-8fcc-72ea894778a1\" (UID: \"c8aebcd6-87e3-492d-8fcc-72ea894778a1\") " Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.191628 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c8aebcd6-87e3-492d-8fcc-72ea894778a1-host\") pod \"c8aebcd6-87e3-492d-8fcc-72ea894778a1\" (UID: \"c8aebcd6-87e3-492d-8fcc-72ea894778a1\") " Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.191990 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdvtm\" (UniqueName: \"kubernetes.io/projected/9dd0a8e8-2790-448d-85ed-753fe1389bab-kube-api-access-vdvtm\") pod \"manila-0561-account-create-6k25c\" (UID: \"9dd0a8e8-2790-448d-85ed-753fe1389bab\") " pod="openstack/manila-0561-account-create-6k25c" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.192063 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c8aebcd6-87e3-492d-8fcc-72ea894778a1-host" (OuterVolumeSpecName: "host") pod "c8aebcd6-87e3-492d-8fcc-72ea894778a1" (UID: "c8aebcd6-87e3-492d-8fcc-72ea894778a1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.202173 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8aebcd6-87e3-492d-8fcc-72ea894778a1-kube-api-access-vfhfh" (OuterVolumeSpecName: "kube-api-access-vfhfh") pod "c8aebcd6-87e3-492d-8fcc-72ea894778a1" (UID: "c8aebcd6-87e3-492d-8fcc-72ea894778a1"). InnerVolumeSpecName "kube-api-access-vfhfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.209834 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5ed0-account-create-kt7pc"] Jun 06 09:30:13 crc kubenswrapper[4911]: E0606 09:30:13.211262 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8aebcd6-87e3-492d-8fcc-72ea894778a1" containerName="container-00" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.211280 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8aebcd6-87e3-492d-8fcc-72ea894778a1" containerName="container-00" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.211473 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8aebcd6-87e3-492d-8fcc-72ea894778a1" containerName="container-00" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.214417 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5ed0-account-create-kt7pc" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.221801 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.230862 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdvtm\" (UniqueName: \"kubernetes.io/projected/9dd0a8e8-2790-448d-85ed-753fe1389bab-kube-api-access-vdvtm\") pod \"manila-0561-account-create-6k25c\" (UID: \"9dd0a8e8-2790-448d-85ed-753fe1389bab\") " pod="openstack/manila-0561-account-create-6k25c" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.232700 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5ed0-account-create-kt7pc"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.249696 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-11ea-account-create-k549j" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.260731 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.264608 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.267739 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.267933 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.268170 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-z87sl" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.268312 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.268623 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.287452 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.293732 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-logs\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.293837 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-ceph\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.293864 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc 
kubenswrapper[4911]: I0606 09:30:13.293893 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-scripts\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.293925 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.293987 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-config-data\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.294025 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtcr2\" (UniqueName: \"kubernetes.io/projected/9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e-kube-api-access-xtcr2\") pod \"neutron-5ed0-account-create-kt7pc\" (UID: \"9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e\") " pod="openstack/neutron-5ed0-account-create-kt7pc" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.294079 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.294156 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mw9cz\" (UniqueName: \"kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-kube-api-access-mw9cz\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.294182 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.294245 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c8aebcd6-87e3-492d-8fcc-72ea894778a1-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.294261 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfhfh\" (UniqueName: \"kubernetes.io/projected/c8aebcd6-87e3-492d-8fcc-72ea894778a1-kube-api-access-vfhfh\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.326527 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 
09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.328326 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.331192 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.331435 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.335194 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.396313 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.396374 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mw9cz\" (UniqueName: \"kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-kube-api-access-mw9cz\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.396402 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.396435 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-logs\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.396497 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-ceph\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.396518 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.396547 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-scripts\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.396574 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.396623 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-config-data\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.396650 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtcr2\" (UniqueName: \"kubernetes.io/projected/9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e-kube-api-access-xtcr2\") pod \"neutron-5ed0-account-create-kt7pc\" (UID: \"9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e\") " pod="openstack/neutron-5ed0-account-create-kt7pc" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.398516 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.398559 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.399221 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-logs\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.406530 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-config-data\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.408580 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-scripts\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.418944 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtcr2\" (UniqueName: \"kubernetes.io/projected/9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e-kube-api-access-xtcr2\") pod \"neutron-5ed0-account-create-kt7pc\" (UID: \"9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e\") " pod="openstack/neutron-5ed0-account-create-kt7pc" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.419209 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-combined-ca-bundle\") 
pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.420391 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.422190 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mw9cz\" (UniqueName: \"kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-kube-api-access-mw9cz\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.425152 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-0561-account-create-6k25c" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.426768 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-ceph\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.466519 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.502071 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.502164 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llp5z\" (UniqueName: \"kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-kube-api-access-llp5z\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.502261 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-ceph\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.502308 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.502358 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.502413 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.502440 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-logs\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.502567 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.502617 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.544014 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5ed0-account-create-kt7pc" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.589235 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.604085 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.604156 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.604229 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.604251 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llp5z\" (UniqueName: \"kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-kube-api-access-llp5z\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.604286 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-ceph\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.604307 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.604331 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.604357 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.604377 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-logs\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: 
I0606 09:30:13.604991 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.605239 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-logs\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.605271 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.611427 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.613690 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.620416 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-ceph\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.620657 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.620734 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.624870 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llp5z\" (UniqueName: \"kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-kube-api-access-llp5z\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.634885 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.712212 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.913790 4911 generic.go:334] "Generic (PLEG): container finished" podID="4a1fccf4-006c-45bd-852d-775a4647b177" containerID="fcf7959bb88dde2881a9a16fd9bf9634d6b4819dadc67bfc32ba8df41507bdd0" exitCode=0 Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.914265 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" event={"ID":"4a1fccf4-006c-45bd-852d-775a4647b177","Type":"ContainerDied","Data":"fcf7959bb88dde2881a9a16fd9bf9634d6b4819dadc67bfc32ba8df41507bdd0"} Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.914329 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" event={"ID":"4a1fccf4-006c-45bd-852d-775a4647b177","Type":"ContainerStarted","Data":"e5e726cacffe1748a16053bd47efb9c1ff2a67e4d5cb3256256d3866af0ece0f"} Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.916152 4911 generic.go:334] "Generic (PLEG): container finished" podID="c8aebcd6-87e3-492d-8fcc-72ea894778a1" containerID="51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575" exitCode=0 Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.916241 4911 scope.go:117] "RemoveContainer" containerID="51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.916197 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-pgk2s" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.917984 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a22e427e-b12f-479a-bb38-e83fa482a724","Type":"ContainerStarted","Data":"d30b2be46243b07374fac8e777ce0a0d21fc96f02f71a8b041f531611f75f2d3"} Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.919936 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zb42f" event={"ID":"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d","Type":"ContainerStarted","Data":"ea065b61e819ce6b2afc0059fe4008cefcc9a3619ab6fe8dba72ce28afdb0218"} Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.919974 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zb42f" event={"ID":"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d","Type":"ContainerStarted","Data":"aff27e5883f1e1ee93cb515ddf7556ed5cbd9f111191162753a13c8ea84a784c"} Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.920169 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" podUID="1806d366-b45d-449a-8674-d9c2ff2e6740" containerName="dnsmasq-dns" containerID="cri-o://10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565" gracePeriod=10 Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.944589 4911 scope.go:117] "RemoveContainer" containerID="51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575" Jun 06 09:30:13 crc kubenswrapper[4911]: E0606 09:30:13.946299 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575\": container with ID starting with 51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575 not found: ID does not exist" containerID="51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.946364 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575"} err="failed to get container status \"51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575\": rpc error: code = NotFound desc = could not find container \"51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575\": container with ID starting with 51a9733775bdf3047e4a5b2c1c724eb9a795b0e67cb702a5113a96b81322e575 not found: ID does not exist" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.964321 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8aebcd6-87e3-492d-8fcc-72ea894778a1" path="/var/lib/kubelet/pods/c8aebcd6-87e3-492d-8fcc-72ea894778a1/volumes" Jun 06 09:30:13 crc kubenswrapper[4911]: I0606 09:30:13.976113 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-zb42f" podStartSLOduration=1.976075787 podStartE2EDuration="1.976075787s" podCreationTimestamp="2025-06-06 09:30:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:13.971449229 +0000 UTC m=+1025.246874772" watchObservedRunningTime="2025-06-06 09:30:13.976075787 +0000 UTC m=+1025.251501330" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.261449 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/barbican-11ea-account-create-k549j"] Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.287459 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-0561-account-create-6k25c"] Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.307594 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5ed0-account-create-kt7pc"] Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.351539 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7854d9c8ff-575q2"] Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.387032 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-grtxs"] Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.463837 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0587-account-create-5ljgg"] Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.474265 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.561259 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.601020 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.646296 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.659932 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.673908 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.741954 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-sb\") pod \"4a1fccf4-006c-45bd-852d-775a4647b177\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.742078 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-swift-storage-0\") pod \"4a1fccf4-006c-45bd-852d-775a4647b177\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.742156 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-svc\") pod \"4a1fccf4-006c-45bd-852d-775a4647b177\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.742205 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-nb\") pod \"4a1fccf4-006c-45bd-852d-775a4647b177\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.742246 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57424\" (UniqueName: 
\"kubernetes.io/projected/4a1fccf4-006c-45bd-852d-775a4647b177-kube-api-access-57424\") pod \"4a1fccf4-006c-45bd-852d-775a4647b177\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.742278 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-config\") pod \"4a1fccf4-006c-45bd-852d-775a4647b177\" (UID: \"4a1fccf4-006c-45bd-852d-775a4647b177\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.756306 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a1fccf4-006c-45bd-852d-775a4647b177-kube-api-access-57424" (OuterVolumeSpecName: "kube-api-access-57424") pod "4a1fccf4-006c-45bd-852d-775a4647b177" (UID: "4a1fccf4-006c-45bd-852d-775a4647b177"). InnerVolumeSpecName "kube-api-access-57424". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.771201 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4a1fccf4-006c-45bd-852d-775a4647b177" (UID: "4a1fccf4-006c-45bd-852d-775a4647b177"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.777399 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4a1fccf4-006c-45bd-852d-775a4647b177" (UID: "4a1fccf4-006c-45bd-852d-775a4647b177"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.779724 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4a1fccf4-006c-45bd-852d-775a4647b177" (UID: "4a1fccf4-006c-45bd-852d-775a4647b177"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.780488 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.792354 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4a1fccf4-006c-45bd-852d-775a4647b177" (UID: "4a1fccf4-006c-45bd-852d-775a4647b177"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.803063 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-config" (OuterVolumeSpecName: "config") pod "4a1fccf4-006c-45bd-852d-775a4647b177" (UID: "4a1fccf4-006c-45bd-852d-775a4647b177"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.844531 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.844595 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.844610 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.844621 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57424\" (UniqueName: \"kubernetes.io/projected/4a1fccf4-006c-45bd-852d-775a4647b177-kube-api-access-57424\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.844636 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.844647 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a1fccf4-006c-45bd-852d-775a4647b177-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.936511 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-0561-account-create-6k25c" event={"ID":"9dd0a8e8-2790-448d-85ed-753fe1389bab","Type":"ContainerStarted","Data":"1b2c399716ad1cfdc7242e7926cdecd5413c8c62c5920e9e9f67ff4276ff5b95"} Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.936808 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-0561-account-create-6k25c" event={"ID":"9dd0a8e8-2790-448d-85ed-753fe1389bab","Type":"ContainerStarted","Data":"d90dd2fffa30b635a57f8028e04275705611a214b340ca9b285dfc446e217503"} Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.945888 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-config\") pod \"1806d366-b45d-449a-8674-d9c2ff2e6740\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.945961 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-sb\") pod \"1806d366-b45d-449a-8674-d9c2ff2e6740\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.946134 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22xr8\" (UniqueName: \"kubernetes.io/projected/1806d366-b45d-449a-8674-d9c2ff2e6740-kube-api-access-22xr8\") pod \"1806d366-b45d-449a-8674-d9c2ff2e6740\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.946167 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-svc\") pod \"1806d366-b45d-449a-8674-d9c2ff2e6740\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.946191 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-nb\") pod \"1806d366-b45d-449a-8674-d9c2ff2e6740\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.946248 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-swift-storage-0\") pod \"1806d366-b45d-449a-8674-d9c2ff2e6740\" (UID: \"1806d366-b45d-449a-8674-d9c2ff2e6740\") " Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.948025 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-grtxs" event={"ID":"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099","Type":"ContainerStarted","Data":"1b0f0b29a3b1008d8f85d3742f33d72c1789ff931bc0d76b364d09b81fcf538b"} Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.959543 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1806d366-b45d-449a-8674-d9c2ff2e6740-kube-api-access-22xr8" (OuterVolumeSpecName: "kube-api-access-22xr8") pod "1806d366-b45d-449a-8674-d9c2ff2e6740" (UID: "1806d366-b45d-449a-8674-d9c2ff2e6740"). InnerVolumeSpecName "kube-api-access-22xr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.964727 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" event={"ID":"4a1fccf4-006c-45bd-852d-775a4647b177","Type":"ContainerDied","Data":"e5e726cacffe1748a16053bd47efb9c1ff2a67e4d5cb3256256d3866af0ece0f"} Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.964787 4911 scope.go:117] "RemoveContainer" containerID="fcf7959bb88dde2881a9a16fd9bf9634d6b4819dadc67bfc32ba8df41507bdd0" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.964735 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f46b79579-8wnpv" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.971475 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-0561-account-create-6k25c" podStartSLOduration=2.971456781 podStartE2EDuration="2.971456781s" podCreationTimestamp="2025-06-06 09:30:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:14.954166358 +0000 UTC m=+1026.229591911" watchObservedRunningTime="2025-06-06 09:30:14.971456781 +0000 UTC m=+1026.246882324" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.972928 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-11ea-account-create-k549j" event={"ID":"368d0127-18f0-4227-b93f-67c3df9d17af","Type":"ContainerStarted","Data":"8a39a20c4317be4d10b24abc5b99368136a690b1e7aaf7e48a5c574f49d789a0"} Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.978252 4911 generic.go:334] "Generic (PLEG): container finished" podID="1806d366-b45d-449a-8674-d9c2ff2e6740" containerID="10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565" exitCode=0 Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.978336 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" event={"ID":"1806d366-b45d-449a-8674-d9c2ff2e6740","Type":"ContainerDied","Data":"10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565"} Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.978367 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" event={"ID":"1806d366-b45d-449a-8674-d9c2ff2e6740","Type":"ContainerDied","Data":"a886f8dba5976ac6fcc2a948631728d30f3262e7f6a10fa6b7dc6ae1f042bae4"} Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.978440 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d99f9bd59-ss24w" Jun 06 09:30:14 crc kubenswrapper[4911]: I0606 09:30:14.998257 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-11ea-account-create-k549j" podStartSLOduration=2.998240726 podStartE2EDuration="2.998240726s" podCreationTimestamp="2025-06-06 09:30:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:14.989675517 +0000 UTC m=+1026.265101080" watchObservedRunningTime="2025-06-06 09:30:14.998240726 +0000 UTC m=+1026.273666269" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.000984 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" event={"ID":"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828","Type":"ContainerStarted","Data":"097797275e3a5ae8afe6c1f9e1b103637b917b5a518f6f19ba1a072f94d60098"} Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.003609 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20a874bd-e22d-43cb-8d7c-061a21f7eeb9","Type":"ContainerStarted","Data":"e1c0f251d39b7c4cd906a1a9edc72422e6f2d899935eb8b6f20e906b06a76c82"} Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.009845 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e94a0c27-4650-4442-8ea2-c7d6ccf0f922","Type":"ContainerStarted","Data":"a14eee5b10ed0942f78e42685d5aaea18e70da231427759dac9ee01c0f311298"} Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.009947 4911 scope.go:117] "RemoveContainer" containerID="10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.031699 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1806d366-b45d-449a-8674-d9c2ff2e6740" (UID: "1806d366-b45d-449a-8674-d9c2ff2e6740"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.032681 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ed0-account-create-kt7pc" event={"ID":"9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e","Type":"ContainerStarted","Data":"1fc52a84b4378ab280f9bc97e9294c9b0176086fae737be48eeb5e9aa0f662ec"} Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.042956 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1806d366-b45d-449a-8674-d9c2ff2e6740" (UID: "1806d366-b45d-449a-8674-d9c2ff2e6740"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.046085 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0587-account-create-5ljgg" event={"ID":"91e36afb-76a0-41d2-85d2-716055b255a6","Type":"ContainerStarted","Data":"b4ad9342c3d102ac613adb4933a65dd1c07d57ac8bd4261e6bac887924070ef8"} Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.047781 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22xr8\" (UniqueName: \"kubernetes.io/projected/1806d366-b45d-449a-8674-d9c2ff2e6740-kube-api-access-22xr8\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.047804 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.047813 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.056696 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1806d366-b45d-449a-8674-d9c2ff2e6740" (UID: "1806d366-b45d-449a-8674-d9c2ff2e6740"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.064344 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f46b79579-8wnpv"] Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.065500 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-config" (OuterVolumeSpecName: "config") pod "1806d366-b45d-449a-8674-d9c2ff2e6740" (UID: "1806d366-b45d-449a-8674-d9c2ff2e6740"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.067679 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1806d366-b45d-449a-8674-d9c2ff2e6740" (UID: "1806d366-b45d-449a-8674-d9c2ff2e6740"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.080351 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f46b79579-8wnpv"] Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.086655 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5ed0-account-create-kt7pc" podStartSLOduration=2.086630278 podStartE2EDuration="2.086630278s" podCreationTimestamp="2025-06-06 09:30:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:15.056152838 +0000 UTC m=+1026.331578391" watchObservedRunningTime="2025-06-06 09:30:15.086630278 +0000 UTC m=+1026.362055831" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.109814 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-0587-account-create-5ljgg" podStartSLOduration=3.109793271 podStartE2EDuration="3.109793271s" podCreationTimestamp="2025-06-06 09:30:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:15.089330327 +0000 UTC m=+1026.364755870" watchObservedRunningTime="2025-06-06 09:30:15.109793271 +0000 UTC m=+1026.385218814" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.152002 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.152049 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.152066 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1806d366-b45d-449a-8674-d9c2ff2e6740-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.315601 4911 scope.go:117] "RemoveContainer" containerID="6cddcd3bb54adc45e968fe866c7e6129d492d594d69a7a13738855bf1bd2f953" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.350512 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d99f9bd59-ss24w"] Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.358003 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d99f9bd59-ss24w"] Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.362319 4911 scope.go:117] "RemoveContainer" containerID="10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565" Jun 06 09:30:15 crc kubenswrapper[4911]: E0606 09:30:15.363006 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565\": container with ID starting with 10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565 not found: ID does not exist" containerID="10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.363061 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565"} 
err="failed to get container status \"10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565\": rpc error: code = NotFound desc = could not find container \"10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565\": container with ID starting with 10163bf112e293cf1562d5ab508287b64a0548bcd4f4476697968314b0ed6565 not found: ID does not exist" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.363116 4911 scope.go:117] "RemoveContainer" containerID="6cddcd3bb54adc45e968fe866c7e6129d492d594d69a7a13738855bf1bd2f953" Jun 06 09:30:15 crc kubenswrapper[4911]: E0606 09:30:15.364002 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cddcd3bb54adc45e968fe866c7e6129d492d594d69a7a13738855bf1bd2f953\": container with ID starting with 6cddcd3bb54adc45e968fe866c7e6129d492d594d69a7a13738855bf1bd2f953 not found: ID does not exist" containerID="6cddcd3bb54adc45e968fe866c7e6129d492d594d69a7a13738855bf1bd2f953" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.364035 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cddcd3bb54adc45e968fe866c7e6129d492d594d69a7a13738855bf1bd2f953"} err="failed to get container status \"6cddcd3bb54adc45e968fe866c7e6129d492d594d69a7a13738855bf1bd2f953\": rpc error: code = NotFound desc = could not find container \"6cddcd3bb54adc45e968fe866c7e6129d492d594d69a7a13738855bf1bd2f953\": container with ID starting with 6cddcd3bb54adc45e968fe866c7e6129d492d594d69a7a13738855bf1bd2f953 not found: ID does not exist" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.962952 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1806d366-b45d-449a-8674-d9c2ff2e6740" path="/var/lib/kubelet/pods/1806d366-b45d-449a-8674-d9c2ff2e6740/volumes" Jun 06 09:30:15 crc kubenswrapper[4911]: I0606 09:30:15.964333 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a1fccf4-006c-45bd-852d-775a4647b177" path="/var/lib/kubelet/pods/4a1fccf4-006c-45bd-852d-775a4647b177/volumes" Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.072110 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20a874bd-e22d-43cb-8d7c-061a21f7eeb9","Type":"ContainerStarted","Data":"99be2a96636f7999e621f2998f7e73e46680093c2abd6b6f08e94e6430e8ab94"} Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.075742 4911 generic.go:334] "Generic (PLEG): container finished" podID="368d0127-18f0-4227-b93f-67c3df9d17af" containerID="854febee88c5fb30b7120f26515beca8487550ded2f5e007328ec31cd97218e4" exitCode=0 Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.075816 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-11ea-account-create-k549j" event={"ID":"368d0127-18f0-4227-b93f-67c3df9d17af","Type":"ContainerDied","Data":"854febee88c5fb30b7120f26515beca8487550ded2f5e007328ec31cd97218e4"} Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.080693 4911 generic.go:334] "Generic (PLEG): container finished" podID="9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e" containerID="0cc039d4b3d67ca18a68997568fccacdb377707280cf0ae67b7b7cad903bb767" exitCode=0 Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.080774 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ed0-account-create-kt7pc" 
event={"ID":"9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e","Type":"ContainerDied","Data":"0cc039d4b3d67ca18a68997568fccacdb377707280cf0ae67b7b7cad903bb767"} Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.082679 4911 generic.go:334] "Generic (PLEG): container finished" podID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerID="443ceda3db9f7c9198e9582b4218e5bcbcf84576cb2b103e791a6ee9987676be" exitCode=0 Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.082748 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" event={"ID":"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828","Type":"ContainerDied","Data":"443ceda3db9f7c9198e9582b4218e5bcbcf84576cb2b103e791a6ee9987676be"} Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.084182 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e94a0c27-4650-4442-8ea2-c7d6ccf0f922","Type":"ContainerStarted","Data":"da927e8bdd09b03e536c4d700c10c47066bea8e00d122f7376dbdd1fe73704a2"} Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.086667 4911 generic.go:334] "Generic (PLEG): container finished" podID="9dd0a8e8-2790-448d-85ed-753fe1389bab" containerID="1b2c399716ad1cfdc7242e7926cdecd5413c8c62c5920e9e9f67ff4276ff5b95" exitCode=0 Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.086718 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-0561-account-create-6k25c" event={"ID":"9dd0a8e8-2790-448d-85ed-753fe1389bab","Type":"ContainerDied","Data":"1b2c399716ad1cfdc7242e7926cdecd5413c8c62c5920e9e9f67ff4276ff5b95"} Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.089277 4911 generic.go:334] "Generic (PLEG): container finished" podID="91e36afb-76a0-41d2-85d2-716055b255a6" containerID="f95599ab53760cfaedb276e449a8553b62c570cde0593fb5ef35148c339bff5c" exitCode=0 Jun 06 09:30:16 crc kubenswrapper[4911]: I0606 09:30:16.089337 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0587-account-create-5ljgg" event={"ID":"91e36afb-76a0-41d2-85d2-716055b255a6","Type":"ContainerDied","Data":"f95599ab53760cfaedb276e449a8553b62c570cde0593fb5ef35148c339bff5c"} Jun 06 09:30:17 crc kubenswrapper[4911]: I0606 09:30:17.102201 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20a874bd-e22d-43cb-8d7c-061a21f7eeb9","Type":"ContainerStarted","Data":"fa3d21519b6d20ae3598dc7d0dd4f3110e30a3f71093030980571465a527f623"} Jun 06 09:30:17 crc kubenswrapper[4911]: I0606 09:30:17.102491 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="20a874bd-e22d-43cb-8d7c-061a21f7eeb9" containerName="glance-log" containerID="cri-o://99be2a96636f7999e621f2998f7e73e46680093c2abd6b6f08e94e6430e8ab94" gracePeriod=30 Jun 06 09:30:17 crc kubenswrapper[4911]: I0606 09:30:17.102586 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="20a874bd-e22d-43cb-8d7c-061a21f7eeb9" containerName="glance-httpd" containerID="cri-o://fa3d21519b6d20ae3598dc7d0dd4f3110e30a3f71093030980571465a527f623" gracePeriod=30 Jun 06 09:30:17 crc kubenswrapper[4911]: I0606 09:30:17.115870 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e94a0c27-4650-4442-8ea2-c7d6ccf0f922" containerName="glance-log" 
containerID="cri-o://da927e8bdd09b03e536c4d700c10c47066bea8e00d122f7376dbdd1fe73704a2" gracePeriod=30 Jun 06 09:30:17 crc kubenswrapper[4911]: I0606 09:30:17.116146 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e94a0c27-4650-4442-8ea2-c7d6ccf0f922","Type":"ContainerStarted","Data":"bdec67d245c09ba6cf763d503745b02f8d6fdd5e03ccf0f36493a87d8080cc5c"} Jun 06 09:30:17 crc kubenswrapper[4911]: I0606 09:30:17.116381 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e94a0c27-4650-4442-8ea2-c7d6ccf0f922" containerName="glance-httpd" containerID="cri-o://bdec67d245c09ba6cf763d503745b02f8d6fdd5e03ccf0f36493a87d8080cc5c" gracePeriod=30 Jun 06 09:30:17 crc kubenswrapper[4911]: I0606 09:30:17.130798 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.130775391 podStartE2EDuration="5.130775391s" podCreationTimestamp="2025-06-06 09:30:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:17.124619664 +0000 UTC m=+1028.400045227" watchObservedRunningTime="2025-06-06 09:30:17.130775391 +0000 UTC m=+1028.406200934" Jun 06 09:30:17 crc kubenswrapper[4911]: I0606 09:30:17.168372 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.168324902 podStartE2EDuration="5.168324902s" podCreationTimestamp="2025-06-06 09:30:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:17.161649291 +0000 UTC m=+1028.437074834" watchObservedRunningTime="2025-06-06 09:30:17.168324902 +0000 UTC m=+1028.443750455" Jun 06 09:30:17 crc kubenswrapper[4911]: I0606 09:30:17.796859 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-11ea-account-create-k549j" Jun 06 09:30:17 crc kubenswrapper[4911]: I0606 09:30:17.897907 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-457bm\" (UniqueName: \"kubernetes.io/projected/368d0127-18f0-4227-b93f-67c3df9d17af-kube-api-access-457bm\") pod \"368d0127-18f0-4227-b93f-67c3df9d17af\" (UID: \"368d0127-18f0-4227-b93f-67c3df9d17af\") " Jun 06 09:30:17 crc kubenswrapper[4911]: I0606 09:30:17.903173 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/368d0127-18f0-4227-b93f-67c3df9d17af-kube-api-access-457bm" (OuterVolumeSpecName: "kube-api-access-457bm") pod "368d0127-18f0-4227-b93f-67c3df9d17af" (UID: "368d0127-18f0-4227-b93f-67c3df9d17af"). InnerVolumeSpecName "kube-api-access-457bm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.000747 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-457bm\" (UniqueName: \"kubernetes.io/projected/368d0127-18f0-4227-b93f-67c3df9d17af-kube-api-access-457bm\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.129280 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-11ea-account-create-k549j" event={"ID":"368d0127-18f0-4227-b93f-67c3df9d17af","Type":"ContainerDied","Data":"8a39a20c4317be4d10b24abc5b99368136a690b1e7aaf7e48a5c574f49d789a0"} Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.129333 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a39a20c4317be4d10b24abc5b99368136a690b1e7aaf7e48a5c574f49d789a0" Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.129327 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-11ea-account-create-k549j" Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.132569 4911 generic.go:334] "Generic (PLEG): container finished" podID="d52ffa6b-91af-4bd5-8e61-ac873c30fb4d" containerID="ea065b61e819ce6b2afc0059fe4008cefcc9a3619ab6fe8dba72ce28afdb0218" exitCode=0 Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.132643 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zb42f" event={"ID":"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d","Type":"ContainerDied","Data":"ea065b61e819ce6b2afc0059fe4008cefcc9a3619ab6fe8dba72ce28afdb0218"} Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.136236 4911 generic.go:334] "Generic (PLEG): container finished" podID="20a874bd-e22d-43cb-8d7c-061a21f7eeb9" containerID="fa3d21519b6d20ae3598dc7d0dd4f3110e30a3f71093030980571465a527f623" exitCode=0 Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.136270 4911 generic.go:334] "Generic (PLEG): container finished" podID="20a874bd-e22d-43cb-8d7c-061a21f7eeb9" containerID="99be2a96636f7999e621f2998f7e73e46680093c2abd6b6f08e94e6430e8ab94" exitCode=143 Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.136314 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20a874bd-e22d-43cb-8d7c-061a21f7eeb9","Type":"ContainerDied","Data":"fa3d21519b6d20ae3598dc7d0dd4f3110e30a3f71093030980571465a527f623"} Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.136336 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20a874bd-e22d-43cb-8d7c-061a21f7eeb9","Type":"ContainerDied","Data":"99be2a96636f7999e621f2998f7e73e46680093c2abd6b6f08e94e6430e8ab94"} Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.139140 4911 generic.go:334] "Generic (PLEG): container finished" podID="e94a0c27-4650-4442-8ea2-c7d6ccf0f922" containerID="bdec67d245c09ba6cf763d503745b02f8d6fdd5e03ccf0f36493a87d8080cc5c" exitCode=0 Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.139178 4911 generic.go:334] "Generic (PLEG): container finished" podID="e94a0c27-4650-4442-8ea2-c7d6ccf0f922" containerID="da927e8bdd09b03e536c4d700c10c47066bea8e00d122f7376dbdd1fe73704a2" exitCode=143 Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.139208 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"e94a0c27-4650-4442-8ea2-c7d6ccf0f922","Type":"ContainerDied","Data":"bdec67d245c09ba6cf763d503745b02f8d6fdd5e03ccf0f36493a87d8080cc5c"} Jun 06 09:30:18 crc kubenswrapper[4911]: I0606 09:30:18.139239 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e94a0c27-4650-4442-8ea2-c7d6ccf0f922","Type":"ContainerDied","Data":"da927e8bdd09b03e536c4d700c10c47066bea8e00d122f7376dbdd1fe73704a2"} Jun 06 09:30:19 crc kubenswrapper[4911]: I0606 09:30:19.878261 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5ed0-account-create-kt7pc" Jun 06 09:30:19 crc kubenswrapper[4911]: I0606 09:30:19.911845 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:19 crc kubenswrapper[4911]: I0606 09:30:19.923969 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-0561-account-create-6k25c" Jun 06 09:30:19 crc kubenswrapper[4911]: I0606 09:30:19.968704 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0587-account-create-5ljgg" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.021978 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.039073 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dwx5\" (UniqueName: \"kubernetes.io/projected/91e36afb-76a0-41d2-85d2-716055b255a6-kube-api-access-8dwx5\") pod \"91e36afb-76a0-41d2-85d2-716055b255a6\" (UID: \"91e36afb-76a0-41d2-85d2-716055b255a6\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.039272 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-credential-keys\") pod \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.039437 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-scripts\") pod \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.039478 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jt56s\" (UniqueName: \"kubernetes.io/projected/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-kube-api-access-jt56s\") pod \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.039522 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtcr2\" (UniqueName: \"kubernetes.io/projected/9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e-kube-api-access-xtcr2\") pod \"9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e\" (UID: \"9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.039579 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-combined-ca-bundle\") pod \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\" (UID: 
\"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.039712 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-config-data\") pod \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.039791 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdvtm\" (UniqueName: \"kubernetes.io/projected/9dd0a8e8-2790-448d-85ed-753fe1389bab-kube-api-access-vdvtm\") pod \"9dd0a8e8-2790-448d-85ed-753fe1389bab\" (UID: \"9dd0a8e8-2790-448d-85ed-753fe1389bab\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.039849 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-fernet-keys\") pod \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\" (UID: \"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.044473 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-scripts" (OuterVolumeSpecName: "scripts") pod "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d" (UID: "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.048815 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-kube-api-access-jt56s" (OuterVolumeSpecName: "kube-api-access-jt56s") pod "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d" (UID: "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d"). InnerVolumeSpecName "kube-api-access-jt56s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.049727 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e-kube-api-access-xtcr2" (OuterVolumeSpecName: "kube-api-access-xtcr2") pod "9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e" (UID: "9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e"). InnerVolumeSpecName "kube-api-access-xtcr2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.052331 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91e36afb-76a0-41d2-85d2-716055b255a6-kube-api-access-8dwx5" (OuterVolumeSpecName: "kube-api-access-8dwx5") pod "91e36afb-76a0-41d2-85d2-716055b255a6" (UID: "91e36afb-76a0-41d2-85d2-716055b255a6"). InnerVolumeSpecName "kube-api-access-8dwx5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.053939 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d" (UID: "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.057409 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dd0a8e8-2790-448d-85ed-753fe1389bab-kube-api-access-vdvtm" (OuterVolumeSpecName: "kube-api-access-vdvtm") pod "9dd0a8e8-2790-448d-85ed-753fe1389bab" (UID: "9dd0a8e8-2790-448d-85ed-753fe1389bab"). InnerVolumeSpecName "kube-api-access-vdvtm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.057538 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d" (UID: "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.079487 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d" (UID: "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.085399 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-config-data" (OuterVolumeSpecName: "config-data") pod "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d" (UID: "d52ffa6b-91af-4bd5-8e61-ac873c30fb4d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.141381 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.141462 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-scripts\") pod \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.141527 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-combined-ca-bundle\") pod \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.141565 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llp5z\" (UniqueName: \"kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-kube-api-access-llp5z\") pod \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.141607 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-ceph\") pod \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " Jun 06 
09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.141626 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-httpd-run\") pod \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.141669 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-logs\") pod \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.141690 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-internal-tls-certs\") pod \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.141752 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-config-data\") pod \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\" (UID: \"e94a0c27-4650-4442-8ea2-c7d6ccf0f922\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.142128 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdvtm\" (UniqueName: \"kubernetes.io/projected/9dd0a8e8-2790-448d-85ed-753fe1389bab-kube-api-access-vdvtm\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.142148 4911 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-fernet-keys\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.142159 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dwx5\" (UniqueName: \"kubernetes.io/projected/91e36afb-76a0-41d2-85d2-716055b255a6-kube-api-access-8dwx5\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.142168 4911 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-credential-keys\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.142176 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.142188 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jt56s\" (UniqueName: \"kubernetes.io/projected/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-kube-api-access-jt56s\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.142197 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtcr2\" (UniqueName: \"kubernetes.io/projected/9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e-kube-api-access-xtcr2\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.142206 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" 
Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.142214 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.144700 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "e94a0c27-4650-4442-8ea2-c7d6ccf0f922" (UID: "e94a0c27-4650-4442-8ea2-c7d6ccf0f922"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.145075 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-logs" (OuterVolumeSpecName: "logs") pod "e94a0c27-4650-4442-8ea2-c7d6ccf0f922" (UID: "e94a0c27-4650-4442-8ea2-c7d6ccf0f922"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.145276 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e94a0c27-4650-4442-8ea2-c7d6ccf0f922" (UID: "e94a0c27-4650-4442-8ea2-c7d6ccf0f922"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.147046 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-ceph" (OuterVolumeSpecName: "ceph") pod "e94a0c27-4650-4442-8ea2-c7d6ccf0f922" (UID: "e94a0c27-4650-4442-8ea2-c7d6ccf0f922"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.149329 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-scripts" (OuterVolumeSpecName: "scripts") pod "e94a0c27-4650-4442-8ea2-c7d6ccf0f922" (UID: "e94a0c27-4650-4442-8ea2-c7d6ccf0f922"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.149697 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-kube-api-access-llp5z" (OuterVolumeSpecName: "kube-api-access-llp5z") pod "e94a0c27-4650-4442-8ea2-c7d6ccf0f922" (UID: "e94a0c27-4650-4442-8ea2-c7d6ccf0f922"). InnerVolumeSpecName "kube-api-access-llp5z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.170270 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a22e427e-b12f-479a-bb38-e83fa482a724","Type":"ContainerStarted","Data":"152d22c0100c07a1682d56ea649bf3af19c5c73d1d87c99d3af0199a22288e02"} Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.174277 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0587-account-create-5ljgg" event={"ID":"91e36afb-76a0-41d2-85d2-716055b255a6","Type":"ContainerDied","Data":"b4ad9342c3d102ac613adb4933a65dd1c07d57ac8bd4261e6bac887924070ef8"} Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.174342 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4ad9342c3d102ac613adb4933a65dd1c07d57ac8bd4261e6bac887924070ef8" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.174938 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0587-account-create-5ljgg" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.175128 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e94a0c27-4650-4442-8ea2-c7d6ccf0f922" (UID: "e94a0c27-4650-4442-8ea2-c7d6ccf0f922"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.178905 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" event={"ID":"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828","Type":"ContainerStarted","Data":"46aea72fa1582226af28b7a0d2a24f45a773a34bee9ad226aa18e83a26a32748"} Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.178953 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.179303 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.185377 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zb42f" event={"ID":"d52ffa6b-91af-4bd5-8e61-ac873c30fb4d","Type":"ContainerDied","Data":"aff27e5883f1e1ee93cb515ddf7556ed5cbd9f111191162753a13c8ea84a784c"} Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.185445 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aff27e5883f1e1ee93cb515ddf7556ed5cbd9f111191162753a13c8ea84a784c" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.185384 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-zb42f" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.188153 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-0561-account-create-6k25c" event={"ID":"9dd0a8e8-2790-448d-85ed-753fe1389bab","Type":"ContainerDied","Data":"d90dd2fffa30b635a57f8028e04275705611a214b340ca9b285dfc446e217503"} Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.188195 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d90dd2fffa30b635a57f8028e04275705611a214b340ca9b285dfc446e217503" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.188236 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-0561-account-create-6k25c" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.192721 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-grtxs" event={"ID":"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099","Type":"ContainerStarted","Data":"34457123d8c5b78aefa3d58c4496b63a36ec290335b890259c6da6cd11c9f1d8"} Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.196195 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.196221 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"20a874bd-e22d-43cb-8d7c-061a21f7eeb9","Type":"ContainerDied","Data":"e1c0f251d39b7c4cd906a1a9edc72422e6f2d899935eb8b6f20e906b06a76c82"} Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.196273 4911 scope.go:117] "RemoveContainer" containerID="fa3d21519b6d20ae3598dc7d0dd4f3110e30a3f71093030980571465a527f623" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.197863 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-config-data" (OuterVolumeSpecName: "config-data") pod "e94a0c27-4650-4442-8ea2-c7d6ccf0f922" (UID: "e94a0c27-4650-4442-8ea2-c7d6ccf0f922"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.201039 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e94a0c27-4650-4442-8ea2-c7d6ccf0f922","Type":"ContainerDied","Data":"a14eee5b10ed0942f78e42685d5aaea18e70da231427759dac9ee01c0f311298"} Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.201266 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.205758 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ed0-account-create-kt7pc" event={"ID":"9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e","Type":"ContainerDied","Data":"1fc52a84b4378ab280f9bc97e9294c9b0176086fae737be48eeb5e9aa0f662ec"} Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.205800 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5ed0-account-create-kt7pc" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.205816 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1fc52a84b4378ab280f9bc97e9294c9b0176086fae737be48eeb5e9aa0f662ec" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.206016 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" podStartSLOduration=8.205792356 podStartE2EDuration="8.205792356s" podCreationTimestamp="2025-06-06 09:30:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:20.201559938 +0000 UTC m=+1031.476985481" watchObservedRunningTime="2025-06-06 09:30:20.205792356 +0000 UTC m=+1031.481217899" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.213327 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e94a0c27-4650-4442-8ea2-c7d6ccf0f922" (UID: "e94a0c27-4650-4442-8ea2-c7d6ccf0f922"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.234326 4911 scope.go:117] "RemoveContainer" containerID="99be2a96636f7999e621f2998f7e73e46680093c2abd6b6f08e94e6430e8ab94" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.243127 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-httpd-run\") pod \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.243239 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-logs\") pod \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.243295 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-scripts\") pod \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.243327 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-config-data\") pod \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.243416 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-combined-ca-bundle\") pod \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.243605 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mw9cz\" (UniqueName: \"kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-kube-api-access-mw9cz\") pod \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\" (UID: 
\"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.243697 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-ceph\") pod \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.243741 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-public-tls-certs\") pod \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.243803 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\" (UID: \"20a874bd-e22d-43cb-8d7c-061a21f7eeb9\") " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.244463 4911 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-ceph\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.244488 4911 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-httpd-run\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.244500 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.244511 4911 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.244523 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.244549 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.244559 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.244571 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.244584 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llp5z\" (UniqueName: \"kubernetes.io/projected/e94a0c27-4650-4442-8ea2-c7d6ccf0f922-kube-api-access-llp5z\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.247019 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-logs" (OuterVolumeSpecName: "logs") pod "20a874bd-e22d-43cb-8d7c-061a21f7eeb9" (UID: "20a874bd-e22d-43cb-8d7c-061a21f7eeb9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.247447 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "20a874bd-e22d-43cb-8d7c-061a21f7eeb9" (UID: "20a874bd-e22d-43cb-8d7c-061a21f7eeb9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.253120 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "20a874bd-e22d-43cb-8d7c-061a21f7eeb9" (UID: "20a874bd-e22d-43cb-8d7c-061a21f7eeb9"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.256783 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-grtxs" podStartSLOduration=2.886101267 podStartE2EDuration="8.25673683s" podCreationTimestamp="2025-06-06 09:30:12 +0000 UTC" firstStartedPulling="2025-06-06 09:30:14.34152012 +0000 UTC m=+1025.616945663" lastFinishedPulling="2025-06-06 09:30:19.712155673 +0000 UTC m=+1030.987581226" observedRunningTime="2025-06-06 09:30:20.250338456 +0000 UTC m=+1031.525764009" watchObservedRunningTime="2025-06-06 09:30:20.25673683 +0000 UTC m=+1031.532162373" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.259862 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-kube-api-access-mw9cz" (OuterVolumeSpecName: "kube-api-access-mw9cz") pod "20a874bd-e22d-43cb-8d7c-061a21f7eeb9" (UID: "20a874bd-e22d-43cb-8d7c-061a21f7eeb9"). InnerVolumeSpecName "kube-api-access-mw9cz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.265460 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-ceph" (OuterVolumeSpecName: "ceph") pod "20a874bd-e22d-43cb-8d7c-061a21f7eeb9" (UID: "20a874bd-e22d-43cb-8d7c-061a21f7eeb9"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.265723 4911 scope.go:117] "RemoveContainer" containerID="bdec67d245c09ba6cf763d503745b02f8d6fdd5e03ccf0f36493a87d8080cc5c" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.267575 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-scripts" (OuterVolumeSpecName: "scripts") pod "20a874bd-e22d-43cb-8d7c-061a21f7eeb9" (UID: "20a874bd-e22d-43cb-8d7c-061a21f7eeb9"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.285122 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.295442 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20a874bd-e22d-43cb-8d7c-061a21f7eeb9" (UID: "20a874bd-e22d-43cb-8d7c-061a21f7eeb9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.306677 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-config-data" (OuterVolumeSpecName: "config-data") pod "20a874bd-e22d-43cb-8d7c-061a21f7eeb9" (UID: "20a874bd-e22d-43cb-8d7c-061a21f7eeb9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.307866 4911 scope.go:117] "RemoveContainer" containerID="da927e8bdd09b03e536c4d700c10c47066bea8e00d122f7376dbdd1fe73704a2" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.310413 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-zb42f"] Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.327285 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-zb42f"] Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.328764 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "20a874bd-e22d-43cb-8d7c-061a21f7eeb9" (UID: "20a874bd-e22d-43cb-8d7c-061a21f7eeb9"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.346410 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mw9cz\" (UniqueName: \"kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-kube-api-access-mw9cz\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.346450 4911 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-ceph\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.346464 4911 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.346487 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.346496 4911 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-httpd-run\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.346504 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.346514 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.346522 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.346530 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20a874bd-e22d-43cb-8d7c-061a21f7eeb9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.346538 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.362661 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400276 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-cgpnm"] Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.400747 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e94a0c27-4650-4442-8ea2-c7d6ccf0f922" containerName="glance-httpd" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400773 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e94a0c27-4650-4442-8ea2-c7d6ccf0f922" containerName="glance-httpd" Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.400793 4911 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="20a874bd-e22d-43cb-8d7c-061a21f7eeb9" containerName="glance-log" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400802 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="20a874bd-e22d-43cb-8d7c-061a21f7eeb9" containerName="glance-log" Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.400827 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e94a0c27-4650-4442-8ea2-c7d6ccf0f922" containerName="glance-log" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400835 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e94a0c27-4650-4442-8ea2-c7d6ccf0f922" containerName="glance-log" Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.400849 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1806d366-b45d-449a-8674-d9c2ff2e6740" containerName="init" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400858 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1806d366-b45d-449a-8674-d9c2ff2e6740" containerName="init" Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.400868 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1806d366-b45d-449a-8674-d9c2ff2e6740" containerName="dnsmasq-dns" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400874 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1806d366-b45d-449a-8674-d9c2ff2e6740" containerName="dnsmasq-dns" Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.400884 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dd0a8e8-2790-448d-85ed-753fe1389bab" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400891 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dd0a8e8-2790-448d-85ed-753fe1389bab" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.400904 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="368d0127-18f0-4227-b93f-67c3df9d17af" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400911 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="368d0127-18f0-4227-b93f-67c3df9d17af" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.400921 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400928 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.400949 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20a874bd-e22d-43cb-8d7c-061a21f7eeb9" containerName="glance-httpd" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400955 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="20a874bd-e22d-43cb-8d7c-061a21f7eeb9" containerName="glance-httpd" Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.400967 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d52ffa6b-91af-4bd5-8e61-ac873c30fb4d" containerName="keystone-bootstrap" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400973 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d52ffa6b-91af-4bd5-8e61-ac873c30fb4d" containerName="keystone-bootstrap" Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.400985 4911 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="4a1fccf4-006c-45bd-852d-775a4647b177" containerName="init" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.400992 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a1fccf4-006c-45bd-852d-775a4647b177" containerName="init" Jun 06 09:30:20 crc kubenswrapper[4911]: E0606 09:30:20.401010 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e36afb-76a0-41d2-85d2-716055b255a6" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401049 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e36afb-76a0-41d2-85d2-716055b255a6" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401256 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e94a0c27-4650-4442-8ea2-c7d6ccf0f922" containerName="glance-log" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401269 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="20a874bd-e22d-43cb-8d7c-061a21f7eeb9" containerName="glance-log" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401285 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401294 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="368d0127-18f0-4227-b93f-67c3df9d17af" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401309 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d52ffa6b-91af-4bd5-8e61-ac873c30fb4d" containerName="keystone-bootstrap" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401317 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="91e36afb-76a0-41d2-85d2-716055b255a6" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401329 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dd0a8e8-2790-448d-85ed-753fe1389bab" containerName="mariadb-account-create" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401341 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="20a874bd-e22d-43cb-8d7c-061a21f7eeb9" containerName="glance-httpd" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401359 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1806d366-b45d-449a-8674-d9c2ff2e6740" containerName="dnsmasq-dns" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401371 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e94a0c27-4650-4442-8ea2-c7d6ccf0f922" containerName="glance-httpd" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401387 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a1fccf4-006c-45bd-852d-775a4647b177" containerName="init" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.401995 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.404883 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s5n65" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.405005 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.405107 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.405212 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.409070 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cgpnm"] Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.447976 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.534850 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.550343 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-combined-ca-bundle\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.550426 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-config-data\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.550476 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-fernet-keys\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.550503 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-credential-keys\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.550672 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfdz9\" (UniqueName: \"kubernetes.io/projected/d576ab7b-1286-4a6f-b43a-183187d822c7-kube-api-access-vfdz9\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.550773 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-scripts\") pod 
\"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.554847 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.566421 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.573304 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.582432 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.584299 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.590553 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.590826 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.591020 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.591198 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-z87sl" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.591546 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.591785 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.593539 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.596021 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.596041 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.598831 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.606504 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.652270 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.652332 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-credential-keys\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.652362 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.652389 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twdmt\" (UniqueName: \"kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-kube-api-access-twdmt\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.652427 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.652452 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-logs\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.652478 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-scripts\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc 
kubenswrapper[4911]: I0606 09:30:20.652503 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-ceph\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.652823 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfdz9\" (UniqueName: \"kubernetes.io/projected/d576ab7b-1286-4a6f-b43a-183187d822c7-kube-api-access-vfdz9\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.652876 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-config-data\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.652895 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.652986 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-scripts\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.653325 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-logs\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.653384 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.653452 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.654160 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-combined-ca-bundle\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc 
kubenswrapper[4911]: I0606 09:30:20.654202 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8c66\" (UniqueName: \"kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-kube-api-access-m8c66\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.654244 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-ceph\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.654267 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.654488 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.654597 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-config-data\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.654660 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.654700 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-fernet-keys\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.654793 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.656785 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-scripts\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 
09:30:20.662468 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-credential-keys\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.663050 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-fernet-keys\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.663391 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-config-data\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.673361 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-combined-ca-bundle\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.680769 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfdz9\" (UniqueName: \"kubernetes.io/projected/d576ab7b-1286-4a6f-b43a-183187d822c7-kube-api-access-vfdz9\") pod \"keystone-bootstrap-cgpnm\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.742480 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758145 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758214 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758244 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758265 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twdmt\" (UniqueName: \"kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-kube-api-access-twdmt\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758299 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758322 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-scripts\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758340 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-logs\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758359 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-ceph\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758401 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-config-data\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 
09:30:20.758416 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758474 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-logs\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758493 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758527 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758564 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8c66\" (UniqueName: \"kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-kube-api-access-m8c66\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758588 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-ceph\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758604 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758629 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.758677 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.759575 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-logs\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.763646 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-scripts\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.763993 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.764376 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.764661 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-logs\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.765236 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.766112 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.766241 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.766686 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.768255 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-ceph\") pod \"glance-default-internal-api-0\" (UID: 
\"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.768331 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.775246 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.775496 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-config-data\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.780488 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twdmt\" (UniqueName: \"kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-kube-api-access-twdmt\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.780497 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.781239 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-ceph\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.786846 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.791231 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8c66\" (UniqueName: \"kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-kube-api-access-m8c66\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.806400 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 
09:30:20.807028 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.895349 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jun 06 09:30:20 crc kubenswrapper[4911]: I0606 09:30:20.918228 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:21 crc kubenswrapper[4911]: I0606 09:30:21.226814 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cgpnm"] Jun 06 09:30:21 crc kubenswrapper[4911]: W0606 09:30:21.327872 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd576ab7b_1286_4a6f_b43a_183187d822c7.slice/crio-6c50ec9fa440abc5e213e03196dfe83cf547f6346c741e7b0972a67a5d5abf77 WatchSource:0}: Error finding container 6c50ec9fa440abc5e213e03196dfe83cf547f6346c741e7b0972a67a5d5abf77: Status 404 returned error can't find the container with id 6c50ec9fa440abc5e213e03196dfe83cf547f6346c741e7b0972a67a5d5abf77 Jun 06 09:30:21 crc kubenswrapper[4911]: W0606 09:30:21.525338 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6909d9d4_e754_4ac5_8f77_e20e48f96714.slice/crio-34328d87ddfacd3970195098329af376246635c22c71f53b76bb2942470d94aa WatchSource:0}: Error finding container 34328d87ddfacd3970195098329af376246635c22c71f53b76bb2942470d94aa: Status 404 returned error can't find the container with id 34328d87ddfacd3970195098329af376246635c22c71f53b76bb2942470d94aa Jun 06 09:30:21 crc kubenswrapper[4911]: I0606 09:30:21.530486 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:30:21 crc kubenswrapper[4911]: I0606 09:30:21.979723 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20a874bd-e22d-43cb-8d7c-061a21f7eeb9" path="/var/lib/kubelet/pods/20a874bd-e22d-43cb-8d7c-061a21f7eeb9/volumes" Jun 06 09:30:21 crc kubenswrapper[4911]: I0606 09:30:21.981682 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d52ffa6b-91af-4bd5-8e61-ac873c30fb4d" path="/var/lib/kubelet/pods/d52ffa6b-91af-4bd5-8e61-ac873c30fb4d/volumes" Jun 06 09:30:21 crc kubenswrapper[4911]: I0606 09:30:21.982584 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e94a0c27-4650-4442-8ea2-c7d6ccf0f922" path="/var/lib/kubelet/pods/e94a0c27-4650-4442-8ea2-c7d6ccf0f922/volumes" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.081067 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:30:22 crc kubenswrapper[4911]: W0606 09:30:22.087700 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39f3ac21_297f_4acd_b430_cf0dcd3d56ff.slice/crio-7011273400e464fffcd6b38a48a9199e57fe908da7414392c0506e42e219aa97 WatchSource:0}: Error finding container 7011273400e464fffcd6b38a48a9199e57fe908da7414392c0506e42e219aa97: Status 404 returned error can't find the container with id 7011273400e464fffcd6b38a48a9199e57fe908da7414392c0506e42e219aa97 Jun 06 
09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.244777 4911 generic.go:334] "Generic (PLEG): container finished" podID="8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099" containerID="34457123d8c5b78aefa3d58c4496b63a36ec290335b890259c6da6cd11c9f1d8" exitCode=0 Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.244887 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-grtxs" event={"ID":"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099","Type":"ContainerDied","Data":"34457123d8c5b78aefa3d58c4496b63a36ec290335b890259c6da6cd11c9f1d8"} Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.249041 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"39f3ac21-297f-4acd-b430-cf0dcd3d56ff","Type":"ContainerStarted","Data":"7011273400e464fffcd6b38a48a9199e57fe908da7414392c0506e42e219aa97"} Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.253673 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cgpnm" event={"ID":"d576ab7b-1286-4a6f-b43a-183187d822c7","Type":"ContainerStarted","Data":"bc76585bc85553e0ac8f84005c4c0def79d4b0b5ad65d19abb7e4d4ca6ea9136"} Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.253731 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cgpnm" event={"ID":"d576ab7b-1286-4a6f-b43a-183187d822c7","Type":"ContainerStarted","Data":"6c50ec9fa440abc5e213e03196dfe83cf547f6346c741e7b0972a67a5d5abf77"} Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.258194 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6909d9d4-e754-4ac5-8f77-e20e48f96714","Type":"ContainerStarted","Data":"1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42"} Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.258249 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6909d9d4-e754-4ac5-8f77-e20e48f96714","Type":"ContainerStarted","Data":"34328d87ddfacd3970195098329af376246635c22c71f53b76bb2942470d94aa"} Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.265866 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a22e427e-b12f-479a-bb38-e83fa482a724","Type":"ContainerStarted","Data":"87f695fa24644cc3770ac07f18eb4de07781e0a45500453515474faec11842f0"} Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.281583 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-cgpnm" podStartSLOduration=2.281538898 podStartE2EDuration="2.281538898s" podCreationTimestamp="2025-06-06 09:30:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:22.280133922 +0000 UTC m=+1033.555559475" watchObservedRunningTime="2025-06-06 09:30:22.281538898 +0000 UTC m=+1033.556964441" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.874534 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-4j5dr"] Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.876237 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.879536 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.879624 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-pb46k" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.879783 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.888782 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-4j5dr"] Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.899940 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-755vx\" (UniqueName: \"kubernetes.io/projected/2ac55e69-d8fc-414b-add4-1d60dfcee487-kube-api-access-755vx\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.899983 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-combined-ca-bundle\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.900030 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-localtime\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.900053 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-db-sync-config-data\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.900081 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-config-data\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.900133 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-scripts\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:22 crc kubenswrapper[4911]: I0606 09:30:22.900158 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-machine-id\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.001812 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-755vx\" (UniqueName: \"kubernetes.io/projected/2ac55e69-d8fc-414b-add4-1d60dfcee487-kube-api-access-755vx\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.001866 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-combined-ca-bundle\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.001940 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-localtime\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.001967 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-db-sync-config-data\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.002016 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-config-data\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.002110 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-scripts\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.002135 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-machine-id\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.005379 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-localtime\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.006036 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-machine-id\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.016183 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-scripts\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " 
pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.016431 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-combined-ca-bundle\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.016544 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-config-data\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.019701 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-755vx\" (UniqueName: \"kubernetes.io/projected/2ac55e69-d8fc-414b-add4-1d60dfcee487-kube-api-access-755vx\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.025324 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-db-sync-config-data\") pod \"cinder-db-sync-4j5dr\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.195209 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.273234 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-69kzd"] Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.274382 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-69kzd" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.276885 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-bw2dv" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.277050 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.291684 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-69kzd"] Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.306890 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6909d9d4-e754-4ac5-8f77-e20e48f96714","Type":"ContainerStarted","Data":"38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202"} Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.312405 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-combined-ca-bundle\") pod \"barbican-db-sync-69kzd\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " pod="openstack/barbican-db-sync-69kzd" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.312601 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25lpk\" (UniqueName: \"kubernetes.io/projected/4c18bbba-7f7b-4601-a8d0-971323c798ac-kube-api-access-25lpk\") pod \"barbican-db-sync-69kzd\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " pod="openstack/barbican-db-sync-69kzd" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.312736 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-db-sync-config-data\") pod \"barbican-db-sync-69kzd\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " pod="openstack/barbican-db-sync-69kzd" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.312977 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"39f3ac21-297f-4acd-b430-cf0dcd3d56ff","Type":"ContainerStarted","Data":"cb43c126ab9d2f48c1da518fc8c59546e84cca87926a978fcf680190b45a07c2"} Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.331839 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.331823265 podStartE2EDuration="3.331823265s" podCreationTimestamp="2025-06-06 09:30:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:23.331317313 +0000 UTC m=+1034.606742856" watchObservedRunningTime="2025-06-06 09:30:23.331823265 +0000 UTC m=+1034.607248808" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.389744 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-q7cj8"] Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.391329 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.394702 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-pffs5" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.394759 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.400684 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-q7cj8"] Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.415329 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-combined-ca-bundle\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.415467 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-job-config-data\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.415514 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mc6p\" (UniqueName: \"kubernetes.io/projected/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-kube-api-access-4mc6p\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.415580 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-config-data\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.415650 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-db-sync-config-data\") pod \"barbican-db-sync-69kzd\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " pod="openstack/barbican-db-sync-69kzd" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.415746 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-combined-ca-bundle\") pod \"barbican-db-sync-69kzd\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " pod="openstack/barbican-db-sync-69kzd" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.415929 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25lpk\" (UniqueName: \"kubernetes.io/projected/4c18bbba-7f7b-4601-a8d0-971323c798ac-kube-api-access-25lpk\") pod \"barbican-db-sync-69kzd\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " pod="openstack/barbican-db-sync-69kzd" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.432772 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-db-sync-config-data\") pod 
\"barbican-db-sync-69kzd\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " pod="openstack/barbican-db-sync-69kzd" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.442912 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-combined-ca-bundle\") pod \"barbican-db-sync-69kzd\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " pod="openstack/barbican-db-sync-69kzd" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.453000 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25lpk\" (UniqueName: \"kubernetes.io/projected/4c18bbba-7f7b-4601-a8d0-971323c798ac-kube-api-access-25lpk\") pod \"barbican-db-sync-69kzd\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " pod="openstack/barbican-db-sync-69kzd" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.465175 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-4hdj9"] Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.466735 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.473709 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.473970 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.474223 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-s2nhg" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.488591 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4hdj9"] Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.517693 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzl5g\" (UniqueName: \"kubernetes.io/projected/952f6db6-b318-4bd1-8052-653fa6484d05-kube-api-access-nzl5g\") pod \"neutron-db-sync-4hdj9\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.517823 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-combined-ca-bundle\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.517861 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-combined-ca-bundle\") pod \"neutron-db-sync-4hdj9\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.517897 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-config\") pod \"neutron-db-sync-4hdj9\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.517925 4911 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-job-config-data\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.518060 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mc6p\" (UniqueName: \"kubernetes.io/projected/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-kube-api-access-4mc6p\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.518135 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-config-data\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.525310 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-combined-ca-bundle\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.525313 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-config-data\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.527609 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-job-config-data\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.553529 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mc6p\" (UniqueName: \"kubernetes.io/projected/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-kube-api-access-4mc6p\") pod \"manila-db-sync-q7cj8\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.620257 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-combined-ca-bundle\") pod \"neutron-db-sync-4hdj9\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.620326 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-config\") pod \"neutron-db-sync-4hdj9\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.620417 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzl5g\" (UniqueName: \"kubernetes.io/projected/952f6db6-b318-4bd1-8052-653fa6484d05-kube-api-access-nzl5g\") pod \"neutron-db-sync-4hdj9\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " 
pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.628449 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-combined-ca-bundle\") pod \"neutron-db-sync-4hdj9\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.631853 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-69kzd" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.635497 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-config\") pod \"neutron-db-sync-4hdj9\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.638871 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzl5g\" (UniqueName: \"kubernetes.io/projected/952f6db6-b318-4bd1-8052-653fa6484d05-kube-api-access-nzl5g\") pod \"neutron-db-sync-4hdj9\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.728170 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-4j5dr"] Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.741922 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-q7cj8" Jun 06 09:30:23 crc kubenswrapper[4911]: W0606 09:30:23.746835 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ac55e69_d8fc_414b_add4_1d60dfcee487.slice/crio-f947066bc5bb3708c2e3cc9c3d665d10c85fc6b7ca2ef80dbab8505a930b9089 WatchSource:0}: Error finding container f947066bc5bb3708c2e3cc9c3d665d10c85fc6b7ca2ef80dbab8505a930b9089: Status 404 returned error can't find the container with id f947066bc5bb3708c2e3cc9c3d665d10c85fc6b7ca2ef80dbab8505a930b9089 Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.787827 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.811645 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.823445 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-logs\") pod \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.823513 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-combined-ca-bundle\") pod \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.823569 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-scripts\") pod \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.823610 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sx4m\" (UniqueName: \"kubernetes.io/projected/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-kube-api-access-2sx4m\") pod \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.823720 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-config-data\") pod \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\" (UID: \"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099\") " Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.825394 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-logs" (OuterVolumeSpecName: "logs") pod "8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099" (UID: "8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.834482 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-scripts" (OuterVolumeSpecName: "scripts") pod "8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099" (UID: "8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.835228 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-kube-api-access-2sx4m" (OuterVolumeSpecName: "kube-api-access-2sx4m") pod "8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099" (UID: "8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099"). InnerVolumeSpecName "kube-api-access-2sx4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.855028 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-config-data" (OuterVolumeSpecName: "config-data") pod "8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099" (UID: "8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.873851 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099" (UID: "8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.926305 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.926337 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.926347 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.926356 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sx4m\" (UniqueName: \"kubernetes.io/projected/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-kube-api-access-2sx4m\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:23 crc kubenswrapper[4911]: I0606 09:30:23.926364 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.216744 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-69kzd"] Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.223541 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4hdj9"] Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.336868 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-69kzd" event={"ID":"4c18bbba-7f7b-4601-a8d0-971323c798ac","Type":"ContainerStarted","Data":"f6efb9ccfe37f92964294bb788eb068a3c286bffca3b68e6e8c1fc9db2d3b5e8"} Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.339055 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4hdj9" event={"ID":"952f6db6-b318-4bd1-8052-653fa6484d05","Type":"ContainerStarted","Data":"3d15d817cabaa8a3cbea80ec5dccf87b430a758f6575b7be27b2869637f8212d"} Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.342481 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-grtxs" event={"ID":"8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099","Type":"ContainerDied","Data":"1b0f0b29a3b1008d8f85d3742f33d72c1789ff931bc0d76b364d09b81fcf538b"} Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.342507 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b0f0b29a3b1008d8f85d3742f33d72c1789ff931bc0d76b364d09b81fcf538b" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.342546 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-grtxs" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.354309 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"39f3ac21-297f-4acd-b430-cf0dcd3d56ff","Type":"ContainerStarted","Data":"3730324e7d934b64ed51c4247ef3da62ca46bc5515eb96961c232db1c2bc591a"} Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.360839 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4j5dr" event={"ID":"2ac55e69-d8fc-414b-add4-1d60dfcee487","Type":"ContainerStarted","Data":"f947066bc5bb3708c2e3cc9c3d665d10c85fc6b7ca2ef80dbab8505a930b9089"} Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.374375 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-q7cj8"] Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.377822 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.377804824 podStartE2EDuration="4.377804824s" podCreationTimestamp="2025-06-06 09:30:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:24.377406914 +0000 UTC m=+1035.652832477" watchObservedRunningTime="2025-06-06 09:30:24.377804824 +0000 UTC m=+1035.653230907" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.410375 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-78bb87b554-zfswk"] Jun 06 09:30:24 crc kubenswrapper[4911]: E0606 09:30:24.410889 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099" containerName="placement-db-sync" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.410910 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099" containerName="placement-db-sync" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.411141 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099" containerName="placement-db-sync" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.412413 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.415173 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.415447 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-qvqnr" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.415208 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.415798 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.415970 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.419216 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-78bb87b554-zfswk"] Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.439320 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-scripts\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.439441 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ecf7a83-e983-4766-a328-31fc235a59c0-logs\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.439547 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-config-data\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.439684 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-combined-ca-bundle\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.439706 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-internal-tls-certs\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.439726 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkz7f\" (UniqueName: \"kubernetes.io/projected/7ecf7a83-e983-4766-a328-31fc235a59c0-kube-api-access-bkz7f\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.439787 4911 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-public-tls-certs\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.541317 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-public-tls-certs\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.541424 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-scripts\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.541505 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ecf7a83-e983-4766-a328-31fc235a59c0-logs\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.541560 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-config-data\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.541631 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-combined-ca-bundle\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.541657 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-internal-tls-certs\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.541701 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkz7f\" (UniqueName: \"kubernetes.io/projected/7ecf7a83-e983-4766-a328-31fc235a59c0-kube-api-access-bkz7f\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.543894 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ecf7a83-e983-4766-a328-31fc235a59c0-logs\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.546577 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-internal-tls-certs\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.546828 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-config-data\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.546923 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-public-tls-certs\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.547467 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-combined-ca-bundle\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.552047 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ecf7a83-e983-4766-a328-31fc235a59c0-scripts\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.562076 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkz7f\" (UniqueName: \"kubernetes.io/projected/7ecf7a83-e983-4766-a328-31fc235a59c0-kube-api-access-bkz7f\") pod \"placement-78bb87b554-zfswk\" (UID: \"7ecf7a83-e983-4766-a328-31fc235a59c0\") " pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:24 crc kubenswrapper[4911]: I0606 09:30:24.744498 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:25 crc kubenswrapper[4911]: I0606 09:30:25.283730 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-78bb87b554-zfswk"] Jun 06 09:30:25 crc kubenswrapper[4911]: W0606 09:30:25.284385 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ecf7a83_e983_4766_a328_31fc235a59c0.slice/crio-24dc11e3c20841f884069c3958c32130f02d80607bc67d5205fcef5346e6a42a WatchSource:0}: Error finding container 24dc11e3c20841f884069c3958c32130f02d80607bc67d5205fcef5346e6a42a: Status 404 returned error can't find the container with id 24dc11e3c20841f884069c3958c32130f02d80607bc67d5205fcef5346e6a42a Jun 06 09:30:25 crc kubenswrapper[4911]: I0606 09:30:25.373243 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4hdj9" event={"ID":"952f6db6-b318-4bd1-8052-653fa6484d05","Type":"ContainerStarted","Data":"d4b09a0cc37e89240a9330950c83e463d7279fcb473aca0cf411d746afc7a51c"} Jun 06 09:30:25 crc kubenswrapper[4911]: I0606 09:30:25.378845 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-q7cj8" event={"ID":"cd2440f6-658c-4edb-938c-b40f2d3f7cf4","Type":"ContainerStarted","Data":"b7e9943c0383cd953804addd6b91c6374c6812183fe04edbcd645375e754fda9"} Jun 06 09:30:25 crc kubenswrapper[4911]: I0606 09:30:25.381425 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-78bb87b554-zfswk" event={"ID":"7ecf7a83-e983-4766-a328-31fc235a59c0","Type":"ContainerStarted","Data":"24dc11e3c20841f884069c3958c32130f02d80607bc67d5205fcef5346e6a42a"} Jun 06 09:30:25 crc kubenswrapper[4911]: I0606 09:30:25.383396 4911 generic.go:334] "Generic (PLEG): container finished" podID="d576ab7b-1286-4a6f-b43a-183187d822c7" containerID="bc76585bc85553e0ac8f84005c4c0def79d4b0b5ad65d19abb7e4d4ca6ea9136" exitCode=0 Jun 06 09:30:25 crc kubenswrapper[4911]: I0606 09:30:25.383459 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cgpnm" event={"ID":"d576ab7b-1286-4a6f-b43a-183187d822c7","Type":"ContainerDied","Data":"bc76585bc85553e0ac8f84005c4c0def79d4b0b5ad65d19abb7e4d4ca6ea9136"} Jun 06 09:30:25 crc kubenswrapper[4911]: I0606 09:30:25.397178 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-4hdj9" podStartSLOduration=2.397156961 podStartE2EDuration="2.397156961s" podCreationTimestamp="2025-06-06 09:30:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:25.390661705 +0000 UTC m=+1036.666087268" watchObservedRunningTime="2025-06-06 09:30:25.397156961 +0000 UTC m=+1036.672582504" Jun 06 09:30:26 crc kubenswrapper[4911]: I0606 09:30:26.395651 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-78bb87b554-zfswk" event={"ID":"7ecf7a83-e983-4766-a328-31fc235a59c0","Type":"ContainerStarted","Data":"50a0886b6bdb396209810ff5bad8af151eafeb16bcea0404852eab12ad7c82ed"} Jun 06 09:30:26 crc kubenswrapper[4911]: I0606 09:30:26.396471 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-78bb87b554-zfswk" event={"ID":"7ecf7a83-e983-4766-a328-31fc235a59c0","Type":"ContainerStarted","Data":"b515926cc789c9a3acbfeaa498076fb38a6278540367bb960bcf2bd00f7c6c6b"} Jun 06 09:30:26 crc kubenswrapper[4911]: I0606 09:30:26.396934 4911 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:26 crc kubenswrapper[4911]: I0606 09:30:26.397117 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:27 crc kubenswrapper[4911]: I0606 09:30:27.909673 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:30:27 crc kubenswrapper[4911]: I0606 09:30:27.944675 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-78bb87b554-zfswk" podStartSLOduration=3.944652116 podStartE2EDuration="3.944652116s" podCreationTimestamp="2025-06-06 09:30:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:26.418690034 +0000 UTC m=+1037.694115597" watchObservedRunningTime="2025-06-06 09:30:27.944652116 +0000 UTC m=+1039.220077669" Jun 06 09:30:27 crc kubenswrapper[4911]: I0606 09:30:27.990713 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-854687b54c-ssgsr"] Jun 06 09:30:27 crc kubenswrapper[4911]: I0606 09:30:27.991013 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" podUID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" containerName="dnsmasq-dns" containerID="cri-o://91cef758908ebb576cf6fa2d7de7de17fc59ba392f3e51caccdec8607242595e" gracePeriod=10 Jun 06 09:30:28 crc kubenswrapper[4911]: I0606 09:30:28.108449 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" podUID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: connect: connection refused" Jun 06 09:30:28 crc kubenswrapper[4911]: I0606 09:30:28.416951 4911 generic.go:334] "Generic (PLEG): container finished" podID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" containerID="91cef758908ebb576cf6fa2d7de7de17fc59ba392f3e51caccdec8607242595e" exitCode=0 Jun 06 09:30:28 crc kubenswrapper[4911]: I0606 09:30:28.417317 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" event={"ID":"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204","Type":"ContainerDied","Data":"91cef758908ebb576cf6fa2d7de7de17fc59ba392f3e51caccdec8607242595e"} Jun 06 09:30:30 crc kubenswrapper[4911]: I0606 09:30:30.813198 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:30 crc kubenswrapper[4911]: I0606 09:30:30.897243 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jun 06 09:30:30 crc kubenswrapper[4911]: I0606 09:30:30.897305 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jun 06 09:30:30 crc kubenswrapper[4911]: I0606 09:30:30.918836 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:30 crc kubenswrapper[4911]: I0606 09:30:30.921457 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:30 crc kubenswrapper[4911]: I0606 09:30:30.951344 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jun 06 09:30:30 crc kubenswrapper[4911]: I0606 09:30:30.951750 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jun 06 09:30:30 crc kubenswrapper[4911]: I0606 09:30:30.952944 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:30 crc kubenswrapper[4911]: I0606 09:30:30.960266 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.015192 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-fernet-keys\") pod \"d576ab7b-1286-4a6f-b43a-183187d822c7\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.015279 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-config-data\") pod \"d576ab7b-1286-4a6f-b43a-183187d822c7\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.015351 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-credential-keys\") pod \"d576ab7b-1286-4a6f-b43a-183187d822c7\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.015442 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfdz9\" (UniqueName: \"kubernetes.io/projected/d576ab7b-1286-4a6f-b43a-183187d822c7-kube-api-access-vfdz9\") pod \"d576ab7b-1286-4a6f-b43a-183187d822c7\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.015656 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-scripts\") pod \"d576ab7b-1286-4a6f-b43a-183187d822c7\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.015733 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-combined-ca-bundle\") pod 
\"d576ab7b-1286-4a6f-b43a-183187d822c7\" (UID: \"d576ab7b-1286-4a6f-b43a-183187d822c7\") " Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.022594 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d576ab7b-1286-4a6f-b43a-183187d822c7-kube-api-access-vfdz9" (OuterVolumeSpecName: "kube-api-access-vfdz9") pod "d576ab7b-1286-4a6f-b43a-183187d822c7" (UID: "d576ab7b-1286-4a6f-b43a-183187d822c7"). InnerVolumeSpecName "kube-api-access-vfdz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.024121 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d576ab7b-1286-4a6f-b43a-183187d822c7" (UID: "d576ab7b-1286-4a6f-b43a-183187d822c7"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.028715 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d576ab7b-1286-4a6f-b43a-183187d822c7" (UID: "d576ab7b-1286-4a6f-b43a-183187d822c7"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.035468 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-scripts" (OuterVolumeSpecName: "scripts") pod "d576ab7b-1286-4a6f-b43a-183187d822c7" (UID: "d576ab7b-1286-4a6f-b43a-183187d822c7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.046051 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-config-data" (OuterVolumeSpecName: "config-data") pod "d576ab7b-1286-4a6f-b43a-183187d822c7" (UID: "d576ab7b-1286-4a6f-b43a-183187d822c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.050324 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d576ab7b-1286-4a6f-b43a-183187d822c7" (UID: "d576ab7b-1286-4a6f-b43a-183187d822c7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.117927 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.117968 4911 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-credential-keys\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.117980 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfdz9\" (UniqueName: \"kubernetes.io/projected/d576ab7b-1286-4a6f-b43a-183187d822c7-kube-api-access-vfdz9\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.117989 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.117997 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.118005 4911 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d576ab7b-1286-4a6f-b43a-183187d822c7-fernet-keys\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.459310 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cgpnm" event={"ID":"d576ab7b-1286-4a6f-b43a-183187d822c7","Type":"ContainerDied","Data":"6c50ec9fa440abc5e213e03196dfe83cf547f6346c741e7b0972a67a5d5abf77"} Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.459351 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c50ec9fa440abc5e213e03196dfe83cf547f6346c741e7b0972a67a5d5abf77" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.459397 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cgpnm" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.462625 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.462676 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.462694 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.462708 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.930539 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-58846dd748-sgvz6"] Jun 06 09:30:31 crc kubenswrapper[4911]: E0606 09:30:31.931261 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d576ab7b-1286-4a6f-b43a-183187d822c7" containerName="keystone-bootstrap" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.931276 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d576ab7b-1286-4a6f-b43a-183187d822c7" containerName="keystone-bootstrap" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.931540 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d576ab7b-1286-4a6f-b43a-183187d822c7" containerName="keystone-bootstrap" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.932262 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.935535 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s5n65" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.939456 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.939722 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.939908 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.940388 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-58846dd748-sgvz6"] Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.940459 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jun 06 09:30:31 crc kubenswrapper[4911]: I0606 09:30:31.940559 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.034737 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59987\" (UniqueName: \"kubernetes.io/projected/b23e60ab-054b-41e9-98c1-a3b2abc02b52-kube-api-access-59987\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.034803 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-internal-tls-certs\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.034832 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-public-tls-certs\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.034873 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-fernet-keys\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.034910 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-credential-keys\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.034948 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-scripts\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.034975 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-combined-ca-bundle\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.035015 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-config-data\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.137365 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-combined-ca-bundle\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.137481 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-config-data\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.137612 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59987\" (UniqueName: 
\"kubernetes.io/projected/b23e60ab-054b-41e9-98c1-a3b2abc02b52-kube-api-access-59987\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.137647 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-internal-tls-certs\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.137675 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-public-tls-certs\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.137734 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-fernet-keys\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.137782 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-credential-keys\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.137832 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-scripts\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.149058 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-combined-ca-bundle\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.149158 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-scripts\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.150920 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-config-data\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.153621 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-credential-keys\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " 
pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.154918 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-internal-tls-certs\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.158234 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-fernet-keys\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.162953 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b23e60ab-054b-41e9-98c1-a3b2abc02b52-public-tls-certs\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.169214 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59987\" (UniqueName: \"kubernetes.io/projected/b23e60ab-054b-41e9-98c1-a3b2abc02b52-kube-api-access-59987\") pod \"keystone-58846dd748-sgvz6\" (UID: \"b23e60ab-054b-41e9-98c1-a3b2abc02b52\") " pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:32 crc kubenswrapper[4911]: I0606 09:30:32.264193 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:33 crc kubenswrapper[4911]: I0606 09:30:33.103870 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" podUID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: connect: connection refused" Jun 06 09:30:33 crc kubenswrapper[4911]: I0606 09:30:33.498664 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jun 06 09:30:33 crc kubenswrapper[4911]: I0606 09:30:33.499504 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jun 06 09:30:33 crc kubenswrapper[4911]: I0606 09:30:33.498688 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jun 06 09:30:33 crc kubenswrapper[4911]: I0606 09:30:33.499606 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jun 06 09:30:34 crc kubenswrapper[4911]: I0606 09:30:34.245426 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jun 06 09:30:34 crc kubenswrapper[4911]: I0606 09:30:34.248541 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jun 06 09:30:34 crc kubenswrapper[4911]: I0606 09:30:34.249293 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:34 crc kubenswrapper[4911]: I0606 09:30:34.250128 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jun 06 09:30:42 crc kubenswrapper[4911]: E0606 09:30:42.203464 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Jun 06 09:30:42 crc kubenswrapper[4911]: E0606 09:30:42.204451 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-25lpk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-69kzd_openstack(4c18bbba-7f7b-4601-a8d0-971323c798ac): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jun 06 09:30:42 crc kubenswrapper[4911]: E0606 09:30:42.205725 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-69kzd" podUID="4c18bbba-7f7b-4601-a8d0-971323c798ac" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.314644 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.468637 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-swift-storage-0\") pod \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.468757 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-sb\") pod \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.469238 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-svc\") pod \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.469386 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfvsk\" (UniqueName: \"kubernetes.io/projected/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-kube-api-access-tfvsk\") pod \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.469519 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-config\") pod \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.469601 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-nb\") pod \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\" (UID: \"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204\") " Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.475605 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-kube-api-access-tfvsk" (OuterVolumeSpecName: "kube-api-access-tfvsk") pod "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" (UID: "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204"). InnerVolumeSpecName "kube-api-access-tfvsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.514774 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" (UID: "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.515956 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-config" (OuterVolumeSpecName: "config") pod "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" (UID: "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.516965 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" (UID: "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.518055 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" (UID: "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.525529 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" (UID: "eaf33d50-92ab-4ef6-8a0d-9ab0eb177204"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.572675 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfvsk\" (UniqueName: \"kubernetes.io/projected/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-kube-api-access-tfvsk\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.572728 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.572743 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.572757 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.572768 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.572779 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.582017 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" event={"ID":"eaf33d50-92ab-4ef6-8a0d-9ab0eb177204","Type":"ContainerDied","Data":"0001a3d2af78eeba0716de326153e1a51a3bde847732afc9fec9c6ef6041f4be"} Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.582084 4911 scope.go:117] "RemoveContainer" containerID="91cef758908ebb576cf6fa2d7de7de17fc59ba392f3e51caccdec8607242595e" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.582038 4911 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" Jun 06 09:30:42 crc kubenswrapper[4911]: E0606 09:30:42.584760 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-69kzd" podUID="4c18bbba-7f7b-4601-a8d0-971323c798ac" Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.649738 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-854687b54c-ssgsr"] Jun 06 09:30:42 crc kubenswrapper[4911]: I0606 09:30:42.659192 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-854687b54c-ssgsr"] Jun 06 09:30:43 crc kubenswrapper[4911]: I0606 09:30:43.103871 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-854687b54c-ssgsr" podUID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: i/o timeout" Jun 06 09:30:43 crc kubenswrapper[4911]: I0606 09:30:43.336837 4911 scope.go:117] "RemoveContainer" containerID="42b81dfa67bcddefa77a50445bf796a73e52419759b5f783c2c80358ac85de43" Jun 06 09:30:43 crc kubenswrapper[4911]: E0606 09:30:43.382465 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Jun 06 09:30:43 crc kubenswrapper[4911]: E0606 09:30:43.382674 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:etc-localtime,ReadOnly:true,MountPath:/etc/localtime,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-755vx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-4j5dr_openstack(2ac55e69-d8fc-414b-add4-1d60dfcee487): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jun 06 09:30:43 crc kubenswrapper[4911]: E0606 09:30:43.384354 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-4j5dr" podUID="2ac55e69-d8fc-414b-add4-1d60dfcee487" Jun 06 09:30:43 crc kubenswrapper[4911]: E0606 09:30:43.597682 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-4j5dr" podUID="2ac55e69-d8fc-414b-add4-1d60dfcee487" Jun 06 09:30:43 crc kubenswrapper[4911]: I0606 09:30:43.766232 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-58846dd748-sgvz6"] Jun 06 09:30:43 crc kubenswrapper[4911]: W0606 09:30:43.778178 4911 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb23e60ab_054b_41e9_98c1_a3b2abc02b52.slice/crio-11ad810f7291f53439b1496517d28cec00dc1f5094ec0049ed637553a94e20ff WatchSource:0}: Error finding container 11ad810f7291f53439b1496517d28cec00dc1f5094ec0049ed637553a94e20ff: Status 404 returned error can't find the container with id 11ad810f7291f53439b1496517d28cec00dc1f5094ec0049ed637553a94e20ff Jun 06 09:30:43 crc kubenswrapper[4911]: I0606 09:30:43.963631 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" path="/var/lib/kubelet/pods/eaf33d50-92ab-4ef6-8a0d-9ab0eb177204/volumes" Jun 06 09:30:44 crc kubenswrapper[4911]: I0606 09:30:44.610132 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a22e427e-b12f-479a-bb38-e83fa482a724","Type":"ContainerStarted","Data":"a70f55e0dd12864cca2293a2f4d4c5d07abe50609131c52c321dcd75ec2a7f18"} Jun 06 09:30:44 crc kubenswrapper[4911]: I0606 09:30:44.611578 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-58846dd748-sgvz6" event={"ID":"b23e60ab-054b-41e9-98c1-a3b2abc02b52","Type":"ContainerStarted","Data":"c1be86cfd50feba32b7c20c8583bc25b433a165443930dc59c30590c00bb2a87"} Jun 06 09:30:44 crc kubenswrapper[4911]: I0606 09:30:44.611601 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-58846dd748-sgvz6" event={"ID":"b23e60ab-054b-41e9-98c1-a3b2abc02b52","Type":"ContainerStarted","Data":"11ad810f7291f53439b1496517d28cec00dc1f5094ec0049ed637553a94e20ff"} Jun 06 09:30:44 crc kubenswrapper[4911]: I0606 09:30:44.612334 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:30:44 crc kubenswrapper[4911]: I0606 09:30:44.614784 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-q7cj8" event={"ID":"cd2440f6-658c-4edb-938c-b40f2d3f7cf4","Type":"ContainerStarted","Data":"a153e854a31def2185439edccebff0910042704552a20e9b6fb22051e5e2a96f"} Jun 06 09:30:44 crc kubenswrapper[4911]: I0606 09:30:44.636927 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-58846dd748-sgvz6" podStartSLOduration=13.636903649 podStartE2EDuration="13.636903649s" podCreationTimestamp="2025-06-06 09:30:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:44.629247253 +0000 UTC m=+1055.904672796" watchObservedRunningTime="2025-06-06 09:30:44.636903649 +0000 UTC m=+1055.912329192" Jun 06 09:30:44 crc kubenswrapper[4911]: I0606 09:30:44.659286 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-q7cj8" podStartSLOduration=2.716105103 podStartE2EDuration="21.659253361s" podCreationTimestamp="2025-06-06 09:30:23 +0000 UTC" firstStartedPulling="2025-06-06 09:30:24.394188283 +0000 UTC m=+1035.669613826" lastFinishedPulling="2025-06-06 09:30:43.337336541 +0000 UTC m=+1054.612762084" observedRunningTime="2025-06-06 09:30:44.654578672 +0000 UTC m=+1055.930004235" watchObservedRunningTime="2025-06-06 09:30:44.659253361 +0000 UTC m=+1055.934678904" Jun 06 09:30:47 crc kubenswrapper[4911]: I0606 09:30:47.651829 4911 generic.go:334] "Generic (PLEG): container finished" podID="952f6db6-b318-4bd1-8052-653fa6484d05" containerID="d4b09a0cc37e89240a9330950c83e463d7279fcb473aca0cf411d746afc7a51c" exitCode=0 Jun 06 09:30:47 crc 
kubenswrapper[4911]: I0606 09:30:47.651945 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4hdj9" event={"ID":"952f6db6-b318-4bd1-8052-653fa6484d05","Type":"ContainerDied","Data":"d4b09a0cc37e89240a9330950c83e463d7279fcb473aca0cf411d746afc7a51c"} Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.319696 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.386028 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-combined-ca-bundle\") pod \"952f6db6-b318-4bd1-8052-653fa6484d05\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.386124 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-config\") pod \"952f6db6-b318-4bd1-8052-653fa6484d05\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.386179 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzl5g\" (UniqueName: \"kubernetes.io/projected/952f6db6-b318-4bd1-8052-653fa6484d05-kube-api-access-nzl5g\") pod \"952f6db6-b318-4bd1-8052-653fa6484d05\" (UID: \"952f6db6-b318-4bd1-8052-653fa6484d05\") " Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.392540 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/952f6db6-b318-4bd1-8052-653fa6484d05-kube-api-access-nzl5g" (OuterVolumeSpecName: "kube-api-access-nzl5g") pod "952f6db6-b318-4bd1-8052-653fa6484d05" (UID: "952f6db6-b318-4bd1-8052-653fa6484d05"). InnerVolumeSpecName "kube-api-access-nzl5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.415474 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-config" (OuterVolumeSpecName: "config") pod "952f6db6-b318-4bd1-8052-653fa6484d05" (UID: "952f6db6-b318-4bd1-8052-653fa6484d05"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.418663 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "952f6db6-b318-4bd1-8052-653fa6484d05" (UID: "952f6db6-b318-4bd1-8052-653fa6484d05"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.489581 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzl5g\" (UniqueName: \"kubernetes.io/projected/952f6db6-b318-4bd1-8052-653fa6484d05-kube-api-access-nzl5g\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.489633 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.489644 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/952f6db6-b318-4bd1-8052-653fa6484d05-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.712400 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4hdj9" event={"ID":"952f6db6-b318-4bd1-8052-653fa6484d05","Type":"ContainerDied","Data":"3d15d817cabaa8a3cbea80ec5dccf87b430a758f6575b7be27b2869637f8212d"} Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.712440 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d15d817cabaa8a3cbea80ec5dccf87b430a758f6575b7be27b2869637f8212d" Jun 06 09:30:53 crc kubenswrapper[4911]: I0606 09:30:53.712472 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4hdj9" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.642946 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57f4d4886c-m5nx6"] Jun 06 09:30:54 crc kubenswrapper[4911]: E0606 09:30:54.643696 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" containerName="init" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.643714 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" containerName="init" Jun 06 09:30:54 crc kubenswrapper[4911]: E0606 09:30:54.643745 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" containerName="dnsmasq-dns" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.643753 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" containerName="dnsmasq-dns" Jun 06 09:30:54 crc kubenswrapper[4911]: E0606 09:30:54.643780 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="952f6db6-b318-4bd1-8052-653fa6484d05" containerName="neutron-db-sync" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.643789 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="952f6db6-b318-4bd1-8052-653fa6484d05" containerName="neutron-db-sync" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.643991 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaf33d50-92ab-4ef6-8a0d-9ab0eb177204" containerName="dnsmasq-dns" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.644014 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="952f6db6-b318-4bd1-8052-653fa6484d05" containerName="neutron-db-sync" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.645151 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.673270 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57f4d4886c-m5nx6"] Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.717497 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-sb\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.717590 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-nb\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.717616 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-swift-storage-0\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.717712 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p66vx\" (UniqueName: \"kubernetes.io/projected/67d3c09a-3215-4dfe-8838-d621e317b13e-kube-api-access-p66vx\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.717766 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-config\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.717811 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-svc\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.745449 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a22e427e-b12f-479a-bb38-e83fa482a724","Type":"ContainerStarted","Data":"6827c3f250a4fa4ada057e00951050c396d9624fc51bb7697042976cd76b068d"} Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.745653 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="ceilometer-central-agent" containerID="cri-o://152d22c0100c07a1682d56ea649bf3af19c5c73d1d87c99d3af0199a22288e02" gracePeriod=30 Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.746009 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.746371 
4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="proxy-httpd" containerID="cri-o://6827c3f250a4fa4ada057e00951050c396d9624fc51bb7697042976cd76b068d" gracePeriod=30 Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.746440 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="sg-core" containerID="cri-o://a70f55e0dd12864cca2293a2f4d4c5d07abe50609131c52c321dcd75ec2a7f18" gracePeriod=30 Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.746486 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="ceilometer-notification-agent" containerID="cri-o://87f695fa24644cc3770ac07f18eb4de07781e0a45500453515474faec11842f0" gracePeriod=30 Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.774414 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-bfb44fcd8-nmbvm"] Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.775834 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.778553 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.781103 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.781618 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.784363 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-s2nhg" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.808222 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-bfb44fcd8-nmbvm"] Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.824695 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p66vx\" (UniqueName: \"kubernetes.io/projected/67d3c09a-3215-4dfe-8838-d621e317b13e-kube-api-access-p66vx\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.824785 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-config\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.824828 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-svc\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.824851 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-sb\") pod 
\"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.825394 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-nb\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.825421 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-swift-storage-0\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.826145 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.974685182 podStartE2EDuration="42.826116709s" podCreationTimestamp="2025-06-06 09:30:12 +0000 UTC" firstStartedPulling="2025-06-06 09:30:13.131538374 +0000 UTC m=+1024.406963917" lastFinishedPulling="2025-06-06 09:30:53.982969901 +0000 UTC m=+1065.258395444" observedRunningTime="2025-06-06 09:30:54.777551056 +0000 UTC m=+1066.052976599" watchObservedRunningTime="2025-06-06 09:30:54.826116709 +0000 UTC m=+1066.101542262" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.827488 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-swift-storage-0\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.828366 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-config\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.829132 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-sb\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.830922 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-nb\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.831343 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-svc\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.863056 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-p66vx\" (UniqueName: \"kubernetes.io/projected/67d3c09a-3215-4dfe-8838-d621e317b13e-kube-api-access-p66vx\") pod \"dnsmasq-dns-57f4d4886c-m5nx6\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.928205 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-ovndb-tls-certs\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.928277 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-config\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.928331 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-httpd-config\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.928492 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lttlt\" (UniqueName: \"kubernetes.io/projected/e0156a37-4d0f-4bc7-812d-8d31a059e888-kube-api-access-lttlt\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.928519 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-combined-ca-bundle\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:54 crc kubenswrapper[4911]: I0606 09:30:54.977747 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.030635 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-ovndb-tls-certs\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.031019 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-config\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.031139 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-httpd-config\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.031200 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lttlt\" (UniqueName: \"kubernetes.io/projected/e0156a37-4d0f-4bc7-812d-8d31a059e888-kube-api-access-lttlt\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.031249 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-combined-ca-bundle\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.041823 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-config\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.042967 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-ovndb-tls-certs\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.044358 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-httpd-config\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.054237 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lttlt\" (UniqueName: \"kubernetes.io/projected/e0156a37-4d0f-4bc7-812d-8d31a059e888-kube-api-access-lttlt\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.064450 4911 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-combined-ca-bundle\") pod \"neutron-bfb44fcd8-nmbvm\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.131561 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.488580 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57f4d4886c-m5nx6"] Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.758582 4911 generic.go:334] "Generic (PLEG): container finished" podID="a22e427e-b12f-479a-bb38-e83fa482a724" containerID="6827c3f250a4fa4ada057e00951050c396d9624fc51bb7697042976cd76b068d" exitCode=0 Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.758932 4911 generic.go:334] "Generic (PLEG): container finished" podID="a22e427e-b12f-479a-bb38-e83fa482a724" containerID="a70f55e0dd12864cca2293a2f4d4c5d07abe50609131c52c321dcd75ec2a7f18" exitCode=2 Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.758943 4911 generic.go:334] "Generic (PLEG): container finished" podID="a22e427e-b12f-479a-bb38-e83fa482a724" containerID="152d22c0100c07a1682d56ea649bf3af19c5c73d1d87c99d3af0199a22288e02" exitCode=0 Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.758635 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a22e427e-b12f-479a-bb38-e83fa482a724","Type":"ContainerDied","Data":"6827c3f250a4fa4ada057e00951050c396d9624fc51bb7697042976cd76b068d"} Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.759029 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a22e427e-b12f-479a-bb38-e83fa482a724","Type":"ContainerDied","Data":"a70f55e0dd12864cca2293a2f4d4c5d07abe50609131c52c321dcd75ec2a7f18"} Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.759063 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a22e427e-b12f-479a-bb38-e83fa482a724","Type":"ContainerDied","Data":"152d22c0100c07a1682d56ea649bf3af19c5c73d1d87c99d3af0199a22288e02"} Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.760798 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" event={"ID":"67d3c09a-3215-4dfe-8838-d621e317b13e","Type":"ContainerStarted","Data":"371f5d50f3e542ba446d10af4a092f6dfd875dde75b4b947f2789f080cdd24da"} Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.760859 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" event={"ID":"67d3c09a-3215-4dfe-8838-d621e317b13e","Type":"ContainerStarted","Data":"60487bd756fdd169539f538e9262d508d140656d7c31b4f50812367817b72223"} Jun 06 09:30:55 crc kubenswrapper[4911]: W0606 09:30:55.777724 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0156a37_4d0f_4bc7_812d_8d31a059e888.slice/crio-58929fed2a5fb2bdc8806cd05d143072966f65eb58544d6953d2be450dd96152 WatchSource:0}: Error finding container 58929fed2a5fb2bdc8806cd05d143072966f65eb58544d6953d2be450dd96152: Status 404 returned error can't find the container with id 58929fed2a5fb2bdc8806cd05d143072966f65eb58544d6953d2be450dd96152 Jun 06 09:30:55 crc kubenswrapper[4911]: I0606 09:30:55.782142 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/neutron-bfb44fcd8-nmbvm"] Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.319172 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.319818 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-78bb87b554-zfswk" Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.770344 4911 generic.go:334] "Generic (PLEG): container finished" podID="67d3c09a-3215-4dfe-8838-d621e317b13e" containerID="371f5d50f3e542ba446d10af4a092f6dfd875dde75b4b947f2789f080cdd24da" exitCode=0 Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.770552 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" event={"ID":"67d3c09a-3215-4dfe-8838-d621e317b13e","Type":"ContainerDied","Data":"371f5d50f3e542ba446d10af4a092f6dfd875dde75b4b947f2789f080cdd24da"} Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.774756 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bfb44fcd8-nmbvm" event={"ID":"e0156a37-4d0f-4bc7-812d-8d31a059e888","Type":"ContainerStarted","Data":"142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb"} Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.774829 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.774855 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bfb44fcd8-nmbvm" event={"ID":"e0156a37-4d0f-4bc7-812d-8d31a059e888","Type":"ContainerStarted","Data":"315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6"} Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.774870 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bfb44fcd8-nmbvm" event={"ID":"e0156a37-4d0f-4bc7-812d-8d31a059e888","Type":"ContainerStarted","Data":"58929fed2a5fb2bdc8806cd05d143072966f65eb58544d6953d2be450dd96152"} Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.824802 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-bfb44fcd8-nmbvm" podStartSLOduration=2.824780637 podStartE2EDuration="2.824780637s" podCreationTimestamp="2025-06-06 09:30:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:56.810233305 +0000 UTC m=+1068.085658868" watchObservedRunningTime="2025-06-06 09:30:56.824780637 +0000 UTC m=+1068.100206190" Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.980572 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-b9f76f6d7-79lsr"] Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.984295 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:56 crc kubenswrapper[4911]: I0606 09:30:56.999241 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.024292 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.034513 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b9f76f6d7-79lsr"] Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.078580 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-httpd-config\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.078654 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-internal-tls-certs\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.078699 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-combined-ca-bundle\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.078756 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-public-tls-certs\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.078776 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-ovndb-tls-certs\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.078853 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2gwx\" (UniqueName: \"kubernetes.io/projected/ab86ede4-0d0e-415a-8dd3-87509499f46e-kube-api-access-k2gwx\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.078945 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-config\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.180724 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-combined-ca-bundle\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.181195 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-public-tls-certs\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.181224 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-ovndb-tls-certs\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.181288 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2gwx\" (UniqueName: \"kubernetes.io/projected/ab86ede4-0d0e-415a-8dd3-87509499f46e-kube-api-access-k2gwx\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.181363 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-config\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.181458 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-httpd-config\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.181498 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-internal-tls-certs\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.186686 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-internal-tls-certs\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.187307 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-httpd-config\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.187715 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-ovndb-tls-certs\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " 
pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.187995 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-public-tls-certs\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.189011 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-config\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.209976 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2gwx\" (UniqueName: \"kubernetes.io/projected/ab86ede4-0d0e-415a-8dd3-87509499f46e-kube-api-access-k2gwx\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.231077 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab86ede4-0d0e-415a-8dd3-87509499f46e-combined-ca-bundle\") pod \"neutron-b9f76f6d7-79lsr\" (UID: \"ab86ede4-0d0e-415a-8dd3-87509499f46e\") " pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.332621 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.786897 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-69kzd" event={"ID":"4c18bbba-7f7b-4601-a8d0-971323c798ac","Type":"ContainerStarted","Data":"369ffb0677feb789e37b0194280b2b91350f281ed675f45995e3c2387a20b451"} Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.789797 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" event={"ID":"67d3c09a-3215-4dfe-8838-d621e317b13e","Type":"ContainerStarted","Data":"b039da457a9b87cf180af68425b552d73129d54aa620a0a9160a62b776bc8715"} Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.790775 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.811087 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-69kzd" podStartSLOduration=1.576308124 podStartE2EDuration="34.811061768s" podCreationTimestamp="2025-06-06 09:30:23 +0000 UTC" firstStartedPulling="2025-06-06 09:30:24.196896324 +0000 UTC m=+1035.472321867" lastFinishedPulling="2025-06-06 09:30:57.431649978 +0000 UTC m=+1068.707075511" observedRunningTime="2025-06-06 09:30:57.803872824 +0000 UTC m=+1069.079298367" watchObservedRunningTime="2025-06-06 09:30:57.811061768 +0000 UTC m=+1069.086487311" Jun 06 09:30:57 crc kubenswrapper[4911]: I0606 09:30:57.830723 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" podStartSLOduration=3.8307034509999998 podStartE2EDuration="3.830703451s" podCreationTimestamp="2025-06-06 09:30:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-06-06 09:30:57.822589783 +0000 UTC m=+1069.098015346" watchObservedRunningTime="2025-06-06 09:30:57.830703451 +0000 UTC m=+1069.106128994" Jun 06 09:30:58 crc kubenswrapper[4911]: I0606 09:30:58.013409 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b9f76f6d7-79lsr"] Jun 06 09:30:58 crc kubenswrapper[4911]: W0606 09:30:58.015188 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab86ede4_0d0e_415a_8dd3_87509499f46e.slice/crio-776f5bffb6dba441141271b51d4a0238945362baf8435f842ff1b7f8e12e8996 WatchSource:0}: Error finding container 776f5bffb6dba441141271b51d4a0238945362baf8435f842ff1b7f8e12e8996: Status 404 returned error can't find the container with id 776f5bffb6dba441141271b51d4a0238945362baf8435f842ff1b7f8e12e8996 Jun 06 09:30:58 crc kubenswrapper[4911]: I0606 09:30:58.802770 4911 generic.go:334] "Generic (PLEG): container finished" podID="a22e427e-b12f-479a-bb38-e83fa482a724" containerID="87f695fa24644cc3770ac07f18eb4de07781e0a45500453515474faec11842f0" exitCode=0 Jun 06 09:30:58 crc kubenswrapper[4911]: I0606 09:30:58.803391 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a22e427e-b12f-479a-bb38-e83fa482a724","Type":"ContainerDied","Data":"87f695fa24644cc3770ac07f18eb4de07781e0a45500453515474faec11842f0"} Jun 06 09:30:58 crc kubenswrapper[4911]: I0606 09:30:58.811302 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b9f76f6d7-79lsr" event={"ID":"ab86ede4-0d0e-415a-8dd3-87509499f46e","Type":"ContainerStarted","Data":"24017e5cbc4b3bdc9e9fe5e74f55ae1d138f89dd18eabe0559811ddb327c4a24"} Jun 06 09:30:58 crc kubenswrapper[4911]: I0606 09:30:58.811374 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:30:58 crc kubenswrapper[4911]: I0606 09:30:58.811393 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b9f76f6d7-79lsr" event={"ID":"ab86ede4-0d0e-415a-8dd3-87509499f46e","Type":"ContainerStarted","Data":"1f34944e3450cc99fc7c96c2530779658d90d78ccd10263af28c8751e1b935df"} Jun 06 09:30:58 crc kubenswrapper[4911]: I0606 09:30:58.811405 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b9f76f6d7-79lsr" event={"ID":"ab86ede4-0d0e-415a-8dd3-87509499f46e","Type":"ContainerStarted","Data":"776f5bffb6dba441141271b51d4a0238945362baf8435f842ff1b7f8e12e8996"} Jun 06 09:30:58 crc kubenswrapper[4911]: I0606 09:30:58.843355 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-b9f76f6d7-79lsr" podStartSLOduration=2.843333666 podStartE2EDuration="2.843333666s" podCreationTimestamp="2025-06-06 09:30:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:30:58.834019068 +0000 UTC m=+1070.109444621" watchObservedRunningTime="2025-06-06 09:30:58.843333666 +0000 UTC m=+1070.118759209" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.118754 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.226258 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-scripts\") pod \"a22e427e-b12f-479a-bb38-e83fa482a724\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.226359 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-sg-core-conf-yaml\") pod \"a22e427e-b12f-479a-bb38-e83fa482a724\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.226421 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-combined-ca-bundle\") pod \"a22e427e-b12f-479a-bb38-e83fa482a724\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.226506 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-run-httpd\") pod \"a22e427e-b12f-479a-bb38-e83fa482a724\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.226570 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-log-httpd\") pod \"a22e427e-b12f-479a-bb38-e83fa482a724\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.226594 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-config-data\") pod \"a22e427e-b12f-479a-bb38-e83fa482a724\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.226655 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s58v5\" (UniqueName: \"kubernetes.io/projected/a22e427e-b12f-479a-bb38-e83fa482a724-kube-api-access-s58v5\") pod \"a22e427e-b12f-479a-bb38-e83fa482a724\" (UID: \"a22e427e-b12f-479a-bb38-e83fa482a724\") " Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.228069 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a22e427e-b12f-479a-bb38-e83fa482a724" (UID: "a22e427e-b12f-479a-bb38-e83fa482a724"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.228192 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a22e427e-b12f-479a-bb38-e83fa482a724" (UID: "a22e427e-b12f-479a-bb38-e83fa482a724"). InnerVolumeSpecName "run-httpd". 
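[editor's illustration] The UnmountVolume.TearDown records above name the volume plugin behind each of ceilometer-0's volumes (kubernetes.io/secret, kubernetes.io/empty-dir, kubernetes.io/projected). Purely as a sketch of what a volumes stanza producing this set could look like — the real manifest is not in this log; the two Secret names are suggested by the reflector cache lines a little further down ("ceilometer-scripts", "ceilometer-config-data") and the rest are assumptions:

```yaml
# Illustrative sketch only -- reconstructed from the volume names/plugins in the
# teardown records above; names not confirmed by the log are assumptions.
volumes:
  - name: scripts
    secret:
      secretName: ceilometer-scripts      # suggested by the reflector line below
  - name: config-data
    secret:
      secretName: ceilometer-config-data  # likewise suggested by the reflector line
  - name: sg-core-conf-yaml
    secret:
      secretName: sg-core-conf-yaml       # assumed name
  - name: combined-ca-bundle
    secret:
      secretName: combined-ca-bundle      # assumed name
  - name: run-httpd
    emptyDir: {}                          # kubernetes.io/empty-dir in the log
  - name: log-httpd
    emptyDir: {}
# kube-api-access-s58v5 is the kubelet-injected projected service-account token
# volume; it does not appear in the author's pod spec.
```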
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.234105 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-scripts" (OuterVolumeSpecName: "scripts") pod "a22e427e-b12f-479a-bb38-e83fa482a724" (UID: "a22e427e-b12f-479a-bb38-e83fa482a724"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.234145 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a22e427e-b12f-479a-bb38-e83fa482a724-kube-api-access-s58v5" (OuterVolumeSpecName: "kube-api-access-s58v5") pod "a22e427e-b12f-479a-bb38-e83fa482a724" (UID: "a22e427e-b12f-479a-bb38-e83fa482a724"). InnerVolumeSpecName "kube-api-access-s58v5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.259959 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a22e427e-b12f-479a-bb38-e83fa482a724" (UID: "a22e427e-b12f-479a-bb38-e83fa482a724"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.321579 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a22e427e-b12f-479a-bb38-e83fa482a724" (UID: "a22e427e-b12f-479a-bb38-e83fa482a724"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.329921 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-run-httpd\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.330053 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a22e427e-b12f-479a-bb38-e83fa482a724-log-httpd\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.330065 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s58v5\" (UniqueName: \"kubernetes.io/projected/a22e427e-b12f-479a-bb38-e83fa482a724-kube-api-access-s58v5\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.330076 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.330087 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.330113 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.392024 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-config-data" (OuterVolumeSpecName: "config-data") pod "a22e427e-b12f-479a-bb38-e83fa482a724" (UID: "a22e427e-b12f-479a-bb38-e83fa482a724"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.431467 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a22e427e-b12f-479a-bb38-e83fa482a724-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.837358 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.844117 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a22e427e-b12f-479a-bb38-e83fa482a724","Type":"ContainerDied","Data":"d30b2be46243b07374fac8e777ce0a0d21fc96f02f71a8b041f531611f75f2d3"} Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.844271 4911 scope.go:117] "RemoveContainer" containerID="6827c3f250a4fa4ada057e00951050c396d9624fc51bb7697042976cd76b068d" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.884876 4911 scope.go:117] "RemoveContainer" containerID="a70f55e0dd12864cca2293a2f4d4c5d07abe50609131c52c321dcd75ec2a7f18" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.888070 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.912508 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.918622 4911 scope.go:117] "RemoveContainer" containerID="87f695fa24644cc3770ac07f18eb4de07781e0a45500453515474faec11842f0" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.922595 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:30:59 crc kubenswrapper[4911]: E0606 09:30:59.923412 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="proxy-httpd" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.923445 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="proxy-httpd" Jun 06 09:30:59 crc kubenswrapper[4911]: E0606 09:30:59.923500 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="ceilometer-central-agent" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.923510 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="ceilometer-central-agent" Jun 06 09:30:59 crc kubenswrapper[4911]: E0606 09:30:59.923522 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="sg-core" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.923530 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="sg-core" Jun 06 09:30:59 crc kubenswrapper[4911]: E0606 09:30:59.923547 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="ceilometer-notification-agent" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.923556 4911 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="ceilometer-notification-agent" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.923806 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="proxy-httpd" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.923842 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="ceilometer-central-agent" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.923862 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="sg-core" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.923878 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" containerName="ceilometer-notification-agent" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.926079 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.930627 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.932636 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.933498 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.949925 4911 scope.go:117] "RemoveContainer" containerID="152d22c0100c07a1682d56ea649bf3af19c5c73d1d87c99d3af0199a22288e02" Jun 06 09:30:59 crc kubenswrapper[4911]: I0606 09:30:59.971960 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a22e427e-b12f-479a-bb38-e83fa482a724" path="/var/lib/kubelet/pods/a22e427e-b12f-479a-bb38-e83fa482a724/volumes" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.050437 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-scripts\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.050762 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-log-httpd\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.050962 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-config-data\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.051136 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.051232 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-run-httpd\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.051268 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.052026 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jwc5\" (UniqueName: \"kubernetes.io/projected/f91e2479-870e-4a44-9130-488b55e8e092-kube-api-access-5jwc5\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.154608 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-log-httpd\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.154681 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-config-data\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.154728 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.154771 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-run-httpd\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.154794 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.154837 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jwc5\" (UniqueName: \"kubernetes.io/projected/f91e2479-870e-4a44-9130-488b55e8e092-kube-api-access-5jwc5\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.154861 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-scripts\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc 
kubenswrapper[4911]: I0606 09:31:00.155185 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-log-httpd\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.156023 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-run-httpd\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.162747 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.163119 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-config-data\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.163143 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-scripts\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.163224 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.173709 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jwc5\" (UniqueName: \"kubernetes.io/projected/f91e2479-870e-4a44-9130-488b55e8e092-kube-api-access-5jwc5\") pod \"ceilometer-0\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.259830 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.780558 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:31:00 crc kubenswrapper[4911]: W0606 09:31:00.788540 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf91e2479_870e_4a44_9130_488b55e8e092.slice/crio-a72ffee0585d7396f7bb578298900c55c4c151b776ac712b49b7f5fe7427b1a5 WatchSource:0}: Error finding container a72ffee0585d7396f7bb578298900c55c4c151b776ac712b49b7f5fe7427b1a5: Status 404 returned error can't find the container with id a72ffee0585d7396f7bb578298900c55c4c151b776ac712b49b7f5fe7427b1a5 Jun 06 09:31:00 crc kubenswrapper[4911]: I0606 09:31:00.845007 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f91e2479-870e-4a44-9130-488b55e8e092","Type":"ContainerStarted","Data":"a72ffee0585d7396f7bb578298900c55c4c151b776ac712b49b7f5fe7427b1a5"} Jun 06 09:31:01 crc kubenswrapper[4911]: I0606 09:31:01.426585 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-d9fcj"] Jun 06 09:31:01 crc kubenswrapper[4911]: I0606 09:31:01.427759 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-d9fcj" Jun 06 09:31:01 crc kubenswrapper[4911]: I0606 09:31:01.583301 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b047c3eb-c044-450a-bef1-c44bc68b0274-host\") pod \"crc-debug-d9fcj\" (UID: \"b047c3eb-c044-450a-bef1-c44bc68b0274\") " pod="openstack/crc-debug-d9fcj" Jun 06 09:31:01 crc kubenswrapper[4911]: I0606 09:31:01.583603 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzsnq\" (UniqueName: \"kubernetes.io/projected/b047c3eb-c044-450a-bef1-c44bc68b0274-kube-api-access-gzsnq\") pod \"crc-debug-d9fcj\" (UID: \"b047c3eb-c044-450a-bef1-c44bc68b0274\") " pod="openstack/crc-debug-d9fcj" Jun 06 09:31:01 crc kubenswrapper[4911]: I0606 09:31:01.685787 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b047c3eb-c044-450a-bef1-c44bc68b0274-host\") pod \"crc-debug-d9fcj\" (UID: \"b047c3eb-c044-450a-bef1-c44bc68b0274\") " pod="openstack/crc-debug-d9fcj" Jun 06 09:31:01 crc kubenswrapper[4911]: I0606 09:31:01.685874 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzsnq\" (UniqueName: \"kubernetes.io/projected/b047c3eb-c044-450a-bef1-c44bc68b0274-kube-api-access-gzsnq\") pod \"crc-debug-d9fcj\" (UID: \"b047c3eb-c044-450a-bef1-c44bc68b0274\") " pod="openstack/crc-debug-d9fcj" Jun 06 09:31:01 crc kubenswrapper[4911]: I0606 09:31:01.685984 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b047c3eb-c044-450a-bef1-c44bc68b0274-host\") pod \"crc-debug-d9fcj\" (UID: \"b047c3eb-c044-450a-bef1-c44bc68b0274\") " pod="openstack/crc-debug-d9fcj" Jun 06 09:31:01 crc kubenswrapper[4911]: I0606 09:31:01.709450 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzsnq\" (UniqueName: \"kubernetes.io/projected/b047c3eb-c044-450a-bef1-c44bc68b0274-kube-api-access-gzsnq\") pod \"crc-debug-d9fcj\" (UID: \"b047c3eb-c044-450a-bef1-c44bc68b0274\") " pod="openstack/crc-debug-d9fcj" Jun 06 
09:31:01 crc kubenswrapper[4911]: I0606 09:31:01.755164 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-d9fcj" Jun 06 09:31:01 crc kubenswrapper[4911]: I0606 09:31:01.860836 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-d9fcj" event={"ID":"b047c3eb-c044-450a-bef1-c44bc68b0274","Type":"ContainerStarted","Data":"746b1679388eed15201248d7d9aa8cf9f7ed415bc7e7761b1577df2f5cb109c1"} Jun 06 09:31:02 crc kubenswrapper[4911]: I0606 09:31:02.886172 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4j5dr" event={"ID":"2ac55e69-d8fc-414b-add4-1d60dfcee487","Type":"ContainerStarted","Data":"bd1af89fbfa306f687233c3fcb8a5174c0ecdb8f42c26ffc0255ea9c17ecf8e9"} Jun 06 09:31:02 crc kubenswrapper[4911]: I0606 09:31:02.890895 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f91e2479-870e-4a44-9130-488b55e8e092","Type":"ContainerStarted","Data":"6209ea7a5d34331eca6041ec96e993a32b803cf0c033bdcf34ad0f99b821bbe2"} Jun 06 09:31:02 crc kubenswrapper[4911]: I0606 09:31:02.893542 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-d9fcj" event={"ID":"b047c3eb-c044-450a-bef1-c44bc68b0274","Type":"ContainerStarted","Data":"eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784"} Jun 06 09:31:02 crc kubenswrapper[4911]: I0606 09:31:02.920162 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-4j5dr" podStartSLOduration=2.7423447210000003 podStartE2EDuration="40.920145515s" podCreationTimestamp="2025-06-06 09:30:22 +0000 UTC" firstStartedPulling="2025-06-06 09:30:23.753833535 +0000 UTC m=+1035.029259078" lastFinishedPulling="2025-06-06 09:31:01.931634329 +0000 UTC m=+1073.207059872" observedRunningTime="2025-06-06 09:31:02.918597756 +0000 UTC m=+1074.194023319" watchObservedRunningTime="2025-06-06 09:31:02.920145515 +0000 UTC m=+1074.195571058" Jun 06 09:31:02 crc kubenswrapper[4911]: I0606 09:31:02.939149 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-d9fcj" podStartSLOduration=1.939125601 podStartE2EDuration="1.939125601s" podCreationTimestamp="2025-06-06 09:31:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:31:02.930350726 +0000 UTC m=+1074.205776289" watchObservedRunningTime="2025-06-06 09:31:02.939125601 +0000 UTC m=+1074.214551144" Jun 06 09:31:03 crc kubenswrapper[4911]: I0606 09:31:03.913173 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f91e2479-870e-4a44-9130-488b55e8e092","Type":"ContainerStarted","Data":"3e4c5a6a86e849da795b3880e47ba39ecdb3f59fe15b674fa1b0c11010d25049"} Jun 06 09:31:04 crc kubenswrapper[4911]: I0606 09:31:04.557605 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-58846dd748-sgvz6" Jun 06 09:31:04 crc kubenswrapper[4911]: I0606 09:31:04.925723 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f91e2479-870e-4a44-9130-488b55e8e092","Type":"ContainerStarted","Data":"dab7e270f92c34ca1303f50e9cbf73d95d30f8a981e121916336054cc2e5f51d"} Jun 06 09:31:04 crc kubenswrapper[4911]: I0606 09:31:04.979252 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:31:05 crc 
kubenswrapper[4911]: I0606 09:31:05.047229 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7854d9c8ff-575q2"] Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.047800 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" containerID="cri-o://46aea72fa1582226af28b7a0d2a24f45a773a34bee9ad226aa18e83a26a32748" gracePeriod=10 Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.600726 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.602237 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.606956 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.608821 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.609918 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-54rmn" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.615492 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.676500 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgmfw\" (UniqueName: \"kubernetes.io/projected/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-kube-api-access-jgmfw\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.676588 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.676642 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-openstack-config\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.676725 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-openstack-config-secret\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.779256 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgmfw\" (UniqueName: \"kubernetes.io/projected/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-kube-api-access-jgmfw\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.779372 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.779441 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-openstack-config\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.779554 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-openstack-config-secret\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.781842 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-openstack-config\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.789671 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-openstack-config-secret\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.799863 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.822164 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgmfw\" (UniqueName: \"kubernetes.io/projected/5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7-kube-api-access-jgmfw\") pod \"openstackclient\" (UID: \"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7\") " pod="openstack/openstackclient" Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.938683 4911 generic.go:334] "Generic (PLEG): container finished" podID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerID="46aea72fa1582226af28b7a0d2a24f45a773a34bee9ad226aa18e83a26a32748" exitCode=0 Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.938769 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" event={"ID":"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828","Type":"ContainerDied","Data":"46aea72fa1582226af28b7a0d2a24f45a773a34bee9ad226aa18e83a26a32748"} Jun 06 09:31:05 crc kubenswrapper[4911]: I0606 09:31:05.943135 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jun 06 09:31:06 crc kubenswrapper[4911]: W0606 09:31:06.527992 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bf7afff_c3ed_491d_a6e1_5c4ab20c8ac7.slice/crio-59909555b9a592abb557ceb95c049679181e91c0ed809306ab3d0c86aa16961f WatchSource:0}: Error finding container 59909555b9a592abb557ceb95c049679181e91c0ed809306ab3d0c86aa16961f: Status 404 returned error can't find the container with id 59909555b9a592abb557ceb95c049679181e91c0ed809306ab3d0c86aa16961f Jun 06 09:31:06 crc kubenswrapper[4911]: I0606 09:31:06.547787 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jun 06 09:31:06 crc kubenswrapper[4911]: I0606 09:31:06.948792 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7","Type":"ContainerStarted","Data":"59909555b9a592abb557ceb95c049679181e91c0ed809306ab3d0c86aa16961f"} Jun 06 09:31:07 crc kubenswrapper[4911]: I0606 09:31:07.909698 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.134:5353: connect: connection refused" Jun 06 09:31:12 crc kubenswrapper[4911]: I0606 09:31:12.909125 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.134:5353: connect: connection refused" Jun 06 09:31:14 crc kubenswrapper[4911]: I0606 09:31:14.064018 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-d9fcj"] Jun 06 09:31:14 crc kubenswrapper[4911]: I0606 09:31:14.064320 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-d9fcj" podUID="b047c3eb-c044-450a-bef1-c44bc68b0274" containerName="container-00" containerID="cri-o://eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784" gracePeriod=2 Jun 06 09:31:14 crc kubenswrapper[4911]: I0606 09:31:14.074652 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-d9fcj"] Jun 06 09:31:14 crc kubenswrapper[4911]: I0606 09:31:14.844459 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-d9fcj" Jun 06 09:31:14 crc kubenswrapper[4911]: I0606 09:31:14.958024 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b047c3eb-c044-450a-bef1-c44bc68b0274-host\") pod \"b047c3eb-c044-450a-bef1-c44bc68b0274\" (UID: \"b047c3eb-c044-450a-bef1-c44bc68b0274\") " Jun 06 09:31:14 crc kubenswrapper[4911]: I0606 09:31:14.958188 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzsnq\" (UniqueName: \"kubernetes.io/projected/b047c3eb-c044-450a-bef1-c44bc68b0274-kube-api-access-gzsnq\") pod \"b047c3eb-c044-450a-bef1-c44bc68b0274\" (UID: \"b047c3eb-c044-450a-bef1-c44bc68b0274\") " Jun 06 09:31:14 crc kubenswrapper[4911]: I0606 09:31:14.958211 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b047c3eb-c044-450a-bef1-c44bc68b0274-host" (OuterVolumeSpecName: "host") pod "b047c3eb-c044-450a-bef1-c44bc68b0274" (UID: "b047c3eb-c044-450a-bef1-c44bc68b0274"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:31:14 crc kubenswrapper[4911]: I0606 09:31:14.958562 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b047c3eb-c044-450a-bef1-c44bc68b0274-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:31:14 crc kubenswrapper[4911]: I0606 09:31:14.965375 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b047c3eb-c044-450a-bef1-c44bc68b0274-kube-api-access-gzsnq" (OuterVolumeSpecName: "kube-api-access-gzsnq") pod "b047c3eb-c044-450a-bef1-c44bc68b0274" (UID: "b047c3eb-c044-450a-bef1-c44bc68b0274"). InnerVolumeSpecName "kube-api-access-gzsnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:31:15 crc kubenswrapper[4911]: I0606 09:31:15.034276 4911 generic.go:334] "Generic (PLEG): container finished" podID="b047c3eb-c044-450a-bef1-c44bc68b0274" containerID="eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784" exitCode=0 Jun 06 09:31:15 crc kubenswrapper[4911]: I0606 09:31:15.034356 4911 scope.go:117] "RemoveContainer" containerID="eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784" Jun 06 09:31:15 crc kubenswrapper[4911]: I0606 09:31:15.034356 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-d9fcj" Jun 06 09:31:15 crc kubenswrapper[4911]: I0606 09:31:15.060808 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzsnq\" (UniqueName: \"kubernetes.io/projected/b047c3eb-c044-450a-bef1-c44bc68b0274-kube-api-access-gzsnq\") on node \"crc\" DevicePath \"\"" Jun 06 09:31:15 crc kubenswrapper[4911]: I0606 09:31:15.960315 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b047c3eb-c044-450a-bef1-c44bc68b0274" path="/var/lib/kubelet/pods/b047c3eb-c044-450a-bef1-c44bc68b0274/volumes" Jun 06 09:31:22 crc kubenswrapper[4911]: I0606 09:31:22.909443 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.134:5353: i/o timeout" Jun 06 09:31:22 crc kubenswrapper[4911]: I0606 09:31:22.911465 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:31:24 crc kubenswrapper[4911]: I0606 09:31:24.299830 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:31:24 crc kubenswrapper[4911]: I0606 09:31:24.300156 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:31:27 crc kubenswrapper[4911]: I0606 09:31:27.341539 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-b9f76f6d7-79lsr" podUID="ab86ede4-0d0e-415a-8dd3-87509499f46e" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Jun 06 09:31:27 crc kubenswrapper[4911]: I0606 09:31:27.342440 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-b9f76f6d7-79lsr" podUID="ab86ede4-0d0e-415a-8dd3-87509499f46e" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Jun 06 09:31:27 crc kubenswrapper[4911]: I0606 09:31:27.343165 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-b9f76f6d7-79lsr" podUID="ab86ede4-0d0e-415a-8dd3-87509499f46e" containerName="neutron-api" probeResult="failure" output="HTTP probe failed with statuscode: 503" Jun 06 09:31:27 crc kubenswrapper[4911]: I0606 09:31:27.911732 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.134:5353: i/o timeout" Jun 06 09:31:32 crc kubenswrapper[4911]: I0606 09:31:32.912653 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.134:5353: i/o timeout" Jun 06 09:31:37 crc kubenswrapper[4911]: I0606 09:31:37.913621 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" 
podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.134:5353: i/o timeout" Jun 06 09:31:42 crc kubenswrapper[4911]: I0606 09:31:42.914725 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.134:5353: i/o timeout" Jun 06 09:31:44 crc kubenswrapper[4911]: E0606 09:31:44.513201 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified" Jun 06 09:31:44 crc kubenswrapper[4911]: E0606 09:31:44.513392 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5fbh66fh666h96h98h678h54dh55dh599h659h574h5c9h5b7h57dhb6h77h65ch578h65ch659h656h5c6h69hbh58ch668h9bh5f7hd4h5b8h5h5dq,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jgmfw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_openstack(5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jun 06 09:31:44 crc kubenswrapper[4911]: E0606 09:31:44.514635 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context 
canceled\"" pod="openstack/openstackclient" podUID="5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.264808 4911 scope.go:117] "RemoveContainer" containerID="eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784" Jun 06 09:31:45 crc kubenswrapper[4911]: E0606 09:31:45.265585 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784\": container with ID starting with eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784 not found: ID does not exist" containerID="eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.265618 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784"} err="failed to get container status \"eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784\": rpc error: code = NotFound desc = could not find container \"eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784\": container with ID starting with eb2fb7a6d368302ab1d773d04f172918b429fadc95a85bd6c5d27dd290351784 not found: ID does not exist" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.288822 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.302396 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.302686 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" event={"ID":"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828","Type":"ContainerDied","Data":"097797275e3a5ae8afe6c1f9e1b103637b917b5a518f6f19ba1a072f94d60098"} Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.302724 4911 scope.go:117] "RemoveContainer" containerID="46aea72fa1582226af28b7a0d2a24f45a773a34bee9ad226aa18e83a26a32748" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.340430 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8tfc\" (UniqueName: \"kubernetes.io/projected/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-kube-api-access-x8tfc\") pod \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.340497 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-config\") pod \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.340601 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-nb\") pod \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.340637 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-sb\") pod 
\"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.340695 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-swift-storage-0\") pod \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.340756 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-svc\") pod \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\" (UID: \"72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828\") " Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.358462 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-kube-api-access-x8tfc" (OuterVolumeSpecName: "kube-api-access-x8tfc") pod "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" (UID: "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828"). InnerVolumeSpecName "kube-api-access-x8tfc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.396650 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" (UID: "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.398208 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" (UID: "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.400619 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" (UID: "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.406521 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" (UID: "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.412755 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-config" (OuterVolumeSpecName: "config") pod "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" (UID: "72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:31:45 crc kubenswrapper[4911]: E0606 09:31:45.436845 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\"\"" pod="openstack/openstackclient" podUID="5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.442942 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.442987 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.443001 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.443014 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.443028 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8tfc\" (UniqueName: \"kubernetes.io/projected/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-kube-api-access-x8tfc\") on node \"crc\" DevicePath \"\"" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.443044 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.451514 4911 scope.go:117] "RemoveContainer" containerID="443ceda3db9f7c9198e9582b4218e5bcbcf84576cb2b103e791a6ee9987676be" Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.660238 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7854d9c8ff-575q2"] Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.669213 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7854d9c8ff-575q2"] Jun 06 09:31:45 crc kubenswrapper[4911]: I0606 09:31:45.959478 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" path="/var/lib/kubelet/pods/72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828/volumes" Jun 06 09:31:46 crc kubenswrapper[4911]: I0606 09:31:46.320774 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f91e2479-870e-4a44-9130-488b55e8e092","Type":"ContainerStarted","Data":"b9fb3df3ffbc235df5668ad681e5c31b5089999c15db5542477dd249a3ccf9ee"} Jun 06 09:31:46 crc kubenswrapper[4911]: I0606 09:31:46.321154 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jun 06 09:31:46 crc kubenswrapper[4911]: I0606 09:31:46.361718 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.636907427 podStartE2EDuration="47.361692494s" podCreationTimestamp="2025-06-06 09:30:59 +0000 UTC" 
firstStartedPulling="2025-06-06 09:31:00.79141878 +0000 UTC m=+1072.066844323" lastFinishedPulling="2025-06-06 09:31:45.516203847 +0000 UTC m=+1116.791629390" observedRunningTime="2025-06-06 09:31:46.345677944 +0000 UTC m=+1117.621103487" watchObservedRunningTime="2025-06-06 09:31:46.361692494 +0000 UTC m=+1117.637118037" Jun 06 09:31:47 crc kubenswrapper[4911]: I0606 09:31:47.915956 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7854d9c8ff-575q2" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.134:5353: i/o timeout" Jun 06 09:31:54 crc kubenswrapper[4911]: I0606 09:31:54.301066 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:31:54 crc kubenswrapper[4911]: I0606 09:31:54.301770 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:31:55 crc kubenswrapper[4911]: I0606 09:31:55.261336 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-bfb44fcd8-nmbvm" podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerName="neutron-api" probeResult="failure" output="Get \"http://10.217.0.152:9696/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jun 06 09:31:55 crc kubenswrapper[4911]: I0606 09:31:55.261339 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-bfb44fcd8-nmbvm" podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerName="neutron-httpd" probeResult="failure" output="Get \"http://10.217.0.152:9696/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jun 06 09:31:55 crc kubenswrapper[4911]: I0606 09:31:55.261347 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-bfb44fcd8-nmbvm" podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerName="neutron-httpd" probeResult="failure" output="Get \"http://10.217.0.152:9696/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jun 06 09:31:55 crc kubenswrapper[4911]: I0606 09:31:55.541564 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:31:58 crc kubenswrapper[4911]: I0606 09:31:58.222896 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-b9f76f6d7-79lsr" Jun 06 09:31:58 crc kubenswrapper[4911]: I0606 09:31:58.289340 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-bfb44fcd8-nmbvm"] Jun 06 09:31:58 crc kubenswrapper[4911]: I0606 09:31:58.289935 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-bfb44fcd8-nmbvm" podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerName="neutron-api" containerID="cri-o://315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6" gracePeriod=30 Jun 06 09:31:58 crc kubenswrapper[4911]: I0606 09:31:58.290113 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-bfb44fcd8-nmbvm" 
podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerName="neutron-httpd" containerID="cri-o://142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb" gracePeriod=30 Jun 06 09:31:59 crc kubenswrapper[4911]: I0606 09:31:59.440787 4911 generic.go:334] "Generic (PLEG): container finished" podID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerID="142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb" exitCode=0 Jun 06 09:31:59 crc kubenswrapper[4911]: I0606 09:31:59.440883 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bfb44fcd8-nmbvm" event={"ID":"e0156a37-4d0f-4bc7-812d-8d31a059e888","Type":"ContainerDied","Data":"142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb"} Jun 06 09:31:59 crc kubenswrapper[4911]: I0606 09:31:59.955511 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 09:32:00 crc kubenswrapper[4911]: I0606 09:32:00.266902 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jun 06 09:32:00 crc kubenswrapper[4911]: I0606 09:32:00.450981 4911 generic.go:334] "Generic (PLEG): container finished" podID="4c18bbba-7f7b-4601-a8d0-971323c798ac" containerID="369ffb0677feb789e37b0194280b2b91350f281ed675f45995e3c2387a20b451" exitCode=0 Jun 06 09:32:00 crc kubenswrapper[4911]: I0606 09:32:00.451035 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-69kzd" event={"ID":"4c18bbba-7f7b-4601-a8d0-971323c798ac","Type":"ContainerDied","Data":"369ffb0677feb789e37b0194280b2b91350f281ed675f45995e3c2387a20b451"} Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.121542 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.236558 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-combined-ca-bundle\") pod \"e0156a37-4d0f-4bc7-812d-8d31a059e888\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.236725 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lttlt\" (UniqueName: \"kubernetes.io/projected/e0156a37-4d0f-4bc7-812d-8d31a059e888-kube-api-access-lttlt\") pod \"e0156a37-4d0f-4bc7-812d-8d31a059e888\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.236775 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-config\") pod \"e0156a37-4d0f-4bc7-812d-8d31a059e888\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.236812 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-httpd-config\") pod \"e0156a37-4d0f-4bc7-812d-8d31a059e888\" (UID: \"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.237001 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-ovndb-tls-certs\") pod \"e0156a37-4d0f-4bc7-812d-8d31a059e888\" (UID: 
\"e0156a37-4d0f-4bc7-812d-8d31a059e888\") " Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.241585 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0156a37-4d0f-4bc7-812d-8d31a059e888-kube-api-access-lttlt" (OuterVolumeSpecName: "kube-api-access-lttlt") pod "e0156a37-4d0f-4bc7-812d-8d31a059e888" (UID: "e0156a37-4d0f-4bc7-812d-8d31a059e888"). InnerVolumeSpecName "kube-api-access-lttlt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.241830 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "e0156a37-4d0f-4bc7-812d-8d31a059e888" (UID: "e0156a37-4d0f-4bc7-812d-8d31a059e888"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.291414 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0156a37-4d0f-4bc7-812d-8d31a059e888" (UID: "e0156a37-4d0f-4bc7-812d-8d31a059e888"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.293458 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-config" (OuterVolumeSpecName: "config") pod "e0156a37-4d0f-4bc7-812d-8d31a059e888" (UID: "e0156a37-4d0f-4bc7-812d-8d31a059e888"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.310175 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "e0156a37-4d0f-4bc7-812d-8d31a059e888" (UID: "e0156a37-4d0f-4bc7-812d-8d31a059e888"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.339524 4911 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.339563 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.339574 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lttlt\" (UniqueName: \"kubernetes.io/projected/e0156a37-4d0f-4bc7-812d-8d31a059e888-kube-api-access-lttlt\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.339584 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.339594 4911 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e0156a37-4d0f-4bc7-812d-8d31a059e888-httpd-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.461477 4911 generic.go:334] "Generic (PLEG): container finished" podID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerID="315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6" exitCode=0 Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.461526 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-bfb44fcd8-nmbvm" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.461523 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bfb44fcd8-nmbvm" event={"ID":"e0156a37-4d0f-4bc7-812d-8d31a059e888","Type":"ContainerDied","Data":"315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6"} Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.461579 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bfb44fcd8-nmbvm" event={"ID":"e0156a37-4d0f-4bc7-812d-8d31a059e888","Type":"ContainerDied","Data":"58929fed2a5fb2bdc8806cd05d143072966f65eb58544d6953d2be450dd96152"} Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.461599 4911 scope.go:117] "RemoveContainer" containerID="142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.463557 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7","Type":"ContainerStarted","Data":"7ed89ea5146517139d99bf6d58087d80b2ddd2f613ca0000bc22ec1a83317321"} Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.478743 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.18184854 podStartE2EDuration="56.478722905s" podCreationTimestamp="2025-06-06 09:31:05 +0000 UTC" firstStartedPulling="2025-06-06 09:31:06.53167654 +0000 UTC m=+1077.807102083" lastFinishedPulling="2025-06-06 09:32:00.828550905 +0000 UTC m=+1132.103976448" observedRunningTime="2025-06-06 09:32:01.478268013 +0000 UTC m=+1132.753693556" watchObservedRunningTime="2025-06-06 09:32:01.478722905 +0000 UTC m=+1132.754148448" Jun 06 09:32:01 crc 
kubenswrapper[4911]: I0606 09:32:01.502379 4911 scope.go:117] "RemoveContainer" containerID="315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.504606 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-bfb44fcd8-nmbvm"] Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.510556 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-bfb44fcd8-nmbvm"] Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.541298 4911 scope.go:117] "RemoveContainer" containerID="142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb" Jun 06 09:32:01 crc kubenswrapper[4911]: E0606 09:32:01.542457 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb\": container with ID starting with 142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb not found: ID does not exist" containerID="142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.542500 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb"} err="failed to get container status \"142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb\": rpc error: code = NotFound desc = could not find container \"142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb\": container with ID starting with 142fe24b201dde165c88cd18c436f4ddeb615f32dc3f6bfb2ce03b07370ef6cb not found: ID does not exist" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.542538 4911 scope.go:117] "RemoveContainer" containerID="315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6" Jun 06 09:32:01 crc kubenswrapper[4911]: E0606 09:32:01.546677 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6\": container with ID starting with 315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6 not found: ID does not exist" containerID="315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.546725 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6"} err="failed to get container status \"315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6\": rpc error: code = NotFound desc = could not find container \"315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6\": container with ID starting with 315b9a13e0244a4ab2e5db98e0b72cfe46f667076acc9148a46c0ce60d1774d6 not found: ID does not exist" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.606797 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-qjvkc"] Jun 06 09:32:01 crc kubenswrapper[4911]: E0606 09:32:01.607185 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b047c3eb-c044-450a-bef1-c44bc68b0274" containerName="container-00" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.607198 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b047c3eb-c044-450a-bef1-c44bc68b0274" containerName="container-00" Jun 06 09:32:01 crc kubenswrapper[4911]: E0606 09:32:01.607219 
4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerName="neutron-httpd" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.607226 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerName="neutron-httpd" Jun 06 09:32:01 crc kubenswrapper[4911]: E0606 09:32:01.607242 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="init" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.607249 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="init" Jun 06 09:32:01 crc kubenswrapper[4911]: E0606 09:32:01.607275 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.607282 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" Jun 06 09:32:01 crc kubenswrapper[4911]: E0606 09:32:01.607298 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerName="neutron-api" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.607305 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerName="neutron-api" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.607484 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="72d29d45-b31c-4d9e-9d2a-f9c1f7ee1828" containerName="dnsmasq-dns" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.607504 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerName="neutron-api" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.607519 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b047c3eb-c044-450a-bef1-c44bc68b0274" containerName="container-00" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.607537 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" containerName="neutron-httpd" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.608234 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-qjvkc" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.747583 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dvmm\" (UniqueName: \"kubernetes.io/projected/a6c02116-6df3-432f-808f-9417c69e5531-kube-api-access-2dvmm\") pod \"crc-debug-qjvkc\" (UID: \"a6c02116-6df3-432f-808f-9417c69e5531\") " pod="openstack/crc-debug-qjvkc" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.747651 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6c02116-6df3-432f-808f-9417c69e5531-host\") pod \"crc-debug-qjvkc\" (UID: \"a6c02116-6df3-432f-808f-9417c69e5531\") " pod="openstack/crc-debug-qjvkc" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.850745 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dvmm\" (UniqueName: \"kubernetes.io/projected/a6c02116-6df3-432f-808f-9417c69e5531-kube-api-access-2dvmm\") pod \"crc-debug-qjvkc\" (UID: \"a6c02116-6df3-432f-808f-9417c69e5531\") " pod="openstack/crc-debug-qjvkc" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.850814 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6c02116-6df3-432f-808f-9417c69e5531-host\") pod \"crc-debug-qjvkc\" (UID: \"a6c02116-6df3-432f-808f-9417c69e5531\") " pod="openstack/crc-debug-qjvkc" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.850963 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6c02116-6df3-432f-808f-9417c69e5531-host\") pod \"crc-debug-qjvkc\" (UID: \"a6c02116-6df3-432f-808f-9417c69e5531\") " pod="openstack/crc-debug-qjvkc" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.870241 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dvmm\" (UniqueName: \"kubernetes.io/projected/a6c02116-6df3-432f-808f-9417c69e5531-kube-api-access-2dvmm\") pod \"crc-debug-qjvkc\" (UID: \"a6c02116-6df3-432f-808f-9417c69e5531\") " pod="openstack/crc-debug-qjvkc" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.934114 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-qjvkc" Jun 06 09:32:01 crc kubenswrapper[4911]: I0606 09:32:01.960082 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0156a37-4d0f-4bc7-812d-8d31a059e888" path="/var/lib/kubelet/pods/e0156a37-4d0f-4bc7-812d-8d31a059e888/volumes" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.140187 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-69kzd" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.157139 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.157457 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="ceilometer-central-agent" containerID="cri-o://6209ea7a5d34331eca6041ec96e993a32b803cf0c033bdcf34ad0f99b821bbe2" gracePeriod=30 Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.157491 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="sg-core" containerID="cri-o://dab7e270f92c34ca1303f50e9cbf73d95d30f8a981e121916336054cc2e5f51d" gracePeriod=30 Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.157491 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="ceilometer-notification-agent" containerID="cri-o://3e4c5a6a86e849da795b3880e47ba39ecdb3f59fe15b674fa1b0c11010d25049" gracePeriod=30 Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.157524 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="proxy-httpd" containerID="cri-o://b9fb3df3ffbc235df5668ad681e5c31b5089999c15db5542477dd249a3ccf9ee" gracePeriod=30 Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.258329 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-combined-ca-bundle\") pod \"4c18bbba-7f7b-4601-a8d0-971323c798ac\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.258402 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25lpk\" (UniqueName: \"kubernetes.io/projected/4c18bbba-7f7b-4601-a8d0-971323c798ac-kube-api-access-25lpk\") pod \"4c18bbba-7f7b-4601-a8d0-971323c798ac\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.258548 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-db-sync-config-data\") pod \"4c18bbba-7f7b-4601-a8d0-971323c798ac\" (UID: \"4c18bbba-7f7b-4601-a8d0-971323c798ac\") " Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.262941 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c18bbba-7f7b-4601-a8d0-971323c798ac-kube-api-access-25lpk" (OuterVolumeSpecName: "kube-api-access-25lpk") pod "4c18bbba-7f7b-4601-a8d0-971323c798ac" (UID: "4c18bbba-7f7b-4601-a8d0-971323c798ac"). InnerVolumeSpecName "kube-api-access-25lpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.263158 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4c18bbba-7f7b-4601-a8d0-971323c798ac" (UID: "4c18bbba-7f7b-4601-a8d0-971323c798ac"). 
InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.292226 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c18bbba-7f7b-4601-a8d0-971323c798ac" (UID: "4c18bbba-7f7b-4601-a8d0-971323c798ac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.361308 4911 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.361357 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c18bbba-7f7b-4601-a8d0-971323c798ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.361372 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25lpk\" (UniqueName: \"kubernetes.io/projected/4c18bbba-7f7b-4601-a8d0-971323c798ac-kube-api-access-25lpk\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.474833 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-69kzd" event={"ID":"4c18bbba-7f7b-4601-a8d0-971323c798ac","Type":"ContainerDied","Data":"f6efb9ccfe37f92964294bb788eb068a3c286bffca3b68e6e8c1fc9db2d3b5e8"} Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.474881 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6efb9ccfe37f92964294bb788eb068a3c286bffca3b68e6e8c1fc9db2d3b5e8" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.474940 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-69kzd" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.477715 4911 generic.go:334] "Generic (PLEG): container finished" podID="f91e2479-870e-4a44-9130-488b55e8e092" containerID="b9fb3df3ffbc235df5668ad681e5c31b5089999c15db5542477dd249a3ccf9ee" exitCode=0 Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.477750 4911 generic.go:334] "Generic (PLEG): container finished" podID="f91e2479-870e-4a44-9130-488b55e8e092" containerID="dab7e270f92c34ca1303f50e9cbf73d95d30f8a981e121916336054cc2e5f51d" exitCode=2 Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.477810 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f91e2479-870e-4a44-9130-488b55e8e092","Type":"ContainerDied","Data":"b9fb3df3ffbc235df5668ad681e5c31b5089999c15db5542477dd249a3ccf9ee"} Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.477873 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f91e2479-870e-4a44-9130-488b55e8e092","Type":"ContainerDied","Data":"dab7e270f92c34ca1303f50e9cbf73d95d30f8a981e121916336054cc2e5f51d"} Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.479118 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-qjvkc" event={"ID":"a6c02116-6df3-432f-808f-9417c69e5531","Type":"ContainerStarted","Data":"ae4e870ca3bceb41d51fdece0cf6a3ebd106e9085d9fecc9abd35645c76f943c"} Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.479153 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-qjvkc" event={"ID":"a6c02116-6df3-432f-808f-9417c69e5531","Type":"ContainerStarted","Data":"8275d636a3e4378769ae8875a9056f7f17926918b795d68fd7a9fafd2b257b27"} Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.530823 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-qjvkc" podStartSLOduration=1.530802539 podStartE2EDuration="1.530802539s" podCreationTimestamp="2025-06-06 09:32:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:02.526219072 +0000 UTC m=+1133.801644625" watchObservedRunningTime="2025-06-06 09:32:02.530802539 +0000 UTC m=+1133.806228082" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.731176 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6b8576d7c9-bckww"] Jun 06 09:32:02 crc kubenswrapper[4911]: E0606 09:32:02.731687 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c18bbba-7f7b-4601-a8d0-971323c798ac" containerName="barbican-db-sync" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.731713 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c18bbba-7f7b-4601-a8d0-971323c798ac" containerName="barbican-db-sync" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.731919 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c18bbba-7f7b-4601-a8d0-971323c798ac" containerName="barbican-db-sync" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.732923 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.738145 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.738438 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.738572 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-bw2dv" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.742242 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-64cdf9799b-mrpz2"] Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.744142 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.746023 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.754341 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b8576d7c9-bckww"] Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.767813 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-64cdf9799b-mrpz2"] Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.816485 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cd77fdfb5-lr9bf"] Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.826699 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.845492 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd77fdfb5-lr9bf"] Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.875111 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bfc6577-537e-444b-aeab-e3f12ef96053-combined-ca-bundle\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.875209 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5bfc6577-537e-444b-aeab-e3f12ef96053-config-data-custom\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.875254 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b8904bf-3086-4a57-b18c-2f113621fc14-logs\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.875280 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5bfc6577-537e-444b-aeab-e3f12ef96053-logs\") pod 
\"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.875301 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b8904bf-3086-4a57-b18c-2f113621fc14-config-data\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.875337 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8b8904bf-3086-4a57-b18c-2f113621fc14-config-data-custom\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.875372 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b8904bf-3086-4a57-b18c-2f113621fc14-combined-ca-bundle\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.875392 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bfc6577-537e-444b-aeab-e3f12ef96053-config-data\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.875437 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzlzd\" (UniqueName: \"kubernetes.io/projected/8b8904bf-3086-4a57-b18c-2f113621fc14-kube-api-access-fzlzd\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.875470 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g88d\" (UniqueName: \"kubernetes.io/projected/5bfc6577-537e-444b-aeab-e3f12ef96053-kube-api-access-2g88d\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.933919 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-58b6f8f48b-4n9zh"] Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.935638 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.939420 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.947991 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-58b6f8f48b-4n9zh"] Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.977564 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-swift-storage-0\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.977641 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8b8904bf-3086-4a57-b18c-2f113621fc14-config-data-custom\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.977700 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b8904bf-3086-4a57-b18c-2f113621fc14-combined-ca-bundle\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.977720 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bfc6577-537e-444b-aeab-e3f12ef96053-config-data\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.977766 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-sb\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.977796 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzlzd\" (UniqueName: \"kubernetes.io/projected/8b8904bf-3086-4a57-b18c-2f113621fc14-kube-api-access-fzlzd\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.977832 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g88d\" (UniqueName: \"kubernetes.io/projected/5bfc6577-537e-444b-aeab-e3f12ef96053-kube-api-access-2g88d\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.977868 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-svc\") pod 
\"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.977903 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-config\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.977944 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-nb\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.977970 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bfc6577-537e-444b-aeab-e3f12ef96053-combined-ca-bundle\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.978082 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5bfc6577-537e-444b-aeab-e3f12ef96053-config-data-custom\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.978127 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b8904bf-3086-4a57-b18c-2f113621fc14-logs\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.978153 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5bfc6577-537e-444b-aeab-e3f12ef96053-logs\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.978179 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b8904bf-3086-4a57-b18c-2f113621fc14-config-data\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.978224 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmv9x\" (UniqueName: \"kubernetes.io/projected/cedbd680-e4db-4d32-8a18-ceebd62e89d0-kube-api-access-gmv9x\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.979677 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/5bfc6577-537e-444b-aeab-e3f12ef96053-logs\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.980079 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b8904bf-3086-4a57-b18c-2f113621fc14-logs\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.985822 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8b8904bf-3086-4a57-b18c-2f113621fc14-config-data-custom\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.987799 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5bfc6577-537e-444b-aeab-e3f12ef96053-config-data-custom\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:02 crc kubenswrapper[4911]: I0606 09:32:02.988481 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bfc6577-537e-444b-aeab-e3f12ef96053-combined-ca-bundle\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.003750 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b8904bf-3086-4a57-b18c-2f113621fc14-combined-ca-bundle\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.003746 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bfc6577-537e-444b-aeab-e3f12ef96053-config-data\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.005277 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b8904bf-3086-4a57-b18c-2f113621fc14-config-data\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.006862 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g88d\" (UniqueName: \"kubernetes.io/projected/5bfc6577-537e-444b-aeab-e3f12ef96053-kube-api-access-2g88d\") pod \"barbican-keystone-listener-64cdf9799b-mrpz2\" (UID: \"5bfc6577-537e-444b-aeab-e3f12ef96053\") " pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.013606 4911 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-fzlzd\" (UniqueName: \"kubernetes.io/projected/8b8904bf-3086-4a57-b18c-2f113621fc14-kube-api-access-fzlzd\") pod \"barbican-worker-6b8576d7c9-bckww\" (UID: \"8b8904bf-3086-4a57-b18c-2f113621fc14\") " pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.067812 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6b8576d7c9-bckww" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.075838 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.079839 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data-custom\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.079963 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-sb\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.080059 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-svc\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.080123 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-config\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.080170 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-nb\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.080500 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltrwb\" (UniqueName: \"kubernetes.io/projected/de868b44-b17e-418a-9194-58d24fc9af42-kube-api-access-ltrwb\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.081054 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-nb\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.081246 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-config\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.081279 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de868b44-b17e-418a-9194-58d24fc9af42-logs\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.081263 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-sb\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.081480 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-combined-ca-bundle\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.081560 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-svc\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.081584 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmv9x\" (UniqueName: \"kubernetes.io/projected/cedbd680-e4db-4d32-8a18-ceebd62e89d0-kube-api-access-gmv9x\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.081795 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.081870 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-swift-storage-0\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.082776 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-swift-storage-0\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.100755 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmv9x\" (UniqueName: 
\"kubernetes.io/projected/cedbd680-e4db-4d32-8a18-ceebd62e89d0-kube-api-access-gmv9x\") pod \"dnsmasq-dns-cd77fdfb5-lr9bf\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.147884 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.184220 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltrwb\" (UniqueName: \"kubernetes.io/projected/de868b44-b17e-418a-9194-58d24fc9af42-kube-api-access-ltrwb\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.184262 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de868b44-b17e-418a-9194-58d24fc9af42-logs\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.184303 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-combined-ca-bundle\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.184339 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.184375 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data-custom\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.184997 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de868b44-b17e-418a-9194-58d24fc9af42-logs\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.190638 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data-custom\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.191379 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-combined-ca-bundle\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.198385 
4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.209837 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltrwb\" (UniqueName: \"kubernetes.io/projected/de868b44-b17e-418a-9194-58d24fc9af42-kube-api-access-ltrwb\") pod \"barbican-api-58b6f8f48b-4n9zh\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.261307 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.518825 4911 generic.go:334] "Generic (PLEG): container finished" podID="f91e2479-870e-4a44-9130-488b55e8e092" containerID="3e4c5a6a86e849da795b3880e47ba39ecdb3f59fe15b674fa1b0c11010d25049" exitCode=0 Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.519207 4911 generic.go:334] "Generic (PLEG): container finished" podID="f91e2479-870e-4a44-9130-488b55e8e092" containerID="6209ea7a5d34331eca6041ec96e993a32b803cf0c033bdcf34ad0f99b821bbe2" exitCode=0 Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.520031 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f91e2479-870e-4a44-9130-488b55e8e092","Type":"ContainerDied","Data":"3e4c5a6a86e849da795b3880e47ba39ecdb3f59fe15b674fa1b0c11010d25049"} Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.520059 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f91e2479-870e-4a44-9130-488b55e8e092","Type":"ContainerDied","Data":"6209ea7a5d34331eca6041ec96e993a32b803cf0c033bdcf34ad0f99b821bbe2"} Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.599661 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-64cdf9799b-mrpz2"] Jun 06 09:32:03 crc kubenswrapper[4911]: W0606 09:32:03.818872 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b8904bf_3086_4a57_b18c_2f113621fc14.slice/crio-629ec8028ea7ee132e7cb4e17f5859f72795530f851a1cbf73dc14969d633120 WatchSource:0}: Error finding container 629ec8028ea7ee132e7cb4e17f5859f72795530f851a1cbf73dc14969d633120: Status 404 returned error can't find the container with id 629ec8028ea7ee132e7cb4e17f5859f72795530f851a1cbf73dc14969d633120 Jun 06 09:32:03 crc kubenswrapper[4911]: I0606 09:32:03.819966 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b8576d7c9-bckww"] Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.083187 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd77fdfb5-lr9bf"] Jun 06 09:32:04 crc kubenswrapper[4911]: W0606 09:32:04.180425 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde868b44_b17e_418a_9194_58d24fc9af42.slice/crio-644a9678f18c567cab1d8fe4ef4195edcd0fdc46ba518524a219bde3a5289645 WatchSource:0}: Error finding container 644a9678f18c567cab1d8fe4ef4195edcd0fdc46ba518524a219bde3a5289645: Status 404 returned error can't find the container with id 
644a9678f18c567cab1d8fe4ef4195edcd0fdc46ba518524a219bde3a5289645 Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.184883 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-58b6f8f48b-4n9zh"] Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.213304 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.314086 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-scripts\") pod \"f91e2479-870e-4a44-9130-488b55e8e092\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.314600 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jwc5\" (UniqueName: \"kubernetes.io/projected/f91e2479-870e-4a44-9130-488b55e8e092-kube-api-access-5jwc5\") pod \"f91e2479-870e-4a44-9130-488b55e8e092\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.314632 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-log-httpd\") pod \"f91e2479-870e-4a44-9130-488b55e8e092\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.314749 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-combined-ca-bundle\") pod \"f91e2479-870e-4a44-9130-488b55e8e092\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.315124 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-run-httpd\") pod \"f91e2479-870e-4a44-9130-488b55e8e092\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.315185 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-config-data\") pod \"f91e2479-870e-4a44-9130-488b55e8e092\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.315226 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-sg-core-conf-yaml\") pod \"f91e2479-870e-4a44-9130-488b55e8e092\" (UID: \"f91e2479-870e-4a44-9130-488b55e8e092\") " Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.315484 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f91e2479-870e-4a44-9130-488b55e8e092" (UID: "f91e2479-870e-4a44-9130-488b55e8e092"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.315842 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f91e2479-870e-4a44-9130-488b55e8e092" (UID: "f91e2479-870e-4a44-9130-488b55e8e092"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.316055 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-log-httpd\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.316075 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f91e2479-870e-4a44-9130-488b55e8e092-run-httpd\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.321804 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-scripts" (OuterVolumeSpecName: "scripts") pod "f91e2479-870e-4a44-9130-488b55e8e092" (UID: "f91e2479-870e-4a44-9130-488b55e8e092"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.328164 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f91e2479-870e-4a44-9130-488b55e8e092-kube-api-access-5jwc5" (OuterVolumeSpecName: "kube-api-access-5jwc5") pod "f91e2479-870e-4a44-9130-488b55e8e092" (UID: "f91e2479-870e-4a44-9130-488b55e8e092"). InnerVolumeSpecName "kube-api-access-5jwc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.398360 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f91e2479-870e-4a44-9130-488b55e8e092" (UID: "f91e2479-870e-4a44-9130-488b55e8e092"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.419034 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.419076 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jwc5\" (UniqueName: \"kubernetes.io/projected/f91e2479-870e-4a44-9130-488b55e8e092-kube-api-access-5jwc5\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.419087 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.433181 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-59f44bc869-n7dl7"] Jun 06 09:32:04 crc kubenswrapper[4911]: E0606 09:32:04.434004 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="ceilometer-notification-agent" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.434115 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="ceilometer-notification-agent" Jun 06 09:32:04 crc kubenswrapper[4911]: E0606 09:32:04.434229 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="ceilometer-central-agent" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.434747 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="ceilometer-central-agent" Jun 06 09:32:04 crc kubenswrapper[4911]: E0606 09:32:04.434910 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="sg-core" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.435035 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="sg-core" Jun 06 09:32:04 crc kubenswrapper[4911]: E0606 09:32:04.435204 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="proxy-httpd" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.435322 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="proxy-httpd" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.435622 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="proxy-httpd" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.435717 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="ceilometer-central-agent" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.435792 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="ceilometer-notification-agent" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.436143 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f91e2479-870e-4a44-9130-488b55e8e092" containerName="sg-core" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.438406 4911 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.443452 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.443512 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.443602 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.454760 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-59f44bc869-n7dl7"] Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.472995 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f91e2479-870e-4a44-9130-488b55e8e092" (UID: "f91e2479-870e-4a44-9130-488b55e8e092"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.510788 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-config-data" (OuterVolumeSpecName: "config-data") pod "f91e2479-870e-4a44-9130-488b55e8e092" (UID: "f91e2479-870e-4a44-9130-488b55e8e092"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.523210 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq8lt\" (UniqueName: \"kubernetes.io/projected/72e5a926-1c68-4e9b-9240-44c27d488e36-kube-api-access-tq8lt\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.523266 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-public-tls-certs\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.523317 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-internal-tls-certs\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.523499 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-combined-ca-bundle\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.523646 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/72e5a926-1c68-4e9b-9240-44c27d488e36-etc-swift\") pod 
\"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.523672 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72e5a926-1c68-4e9b-9240-44c27d488e36-run-httpd\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.523702 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72e5a926-1c68-4e9b-9240-44c27d488e36-log-httpd\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.523784 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-config-data\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.524078 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.524131 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f91e2479-870e-4a44-9130-488b55e8e092-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.533139 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b8576d7c9-bckww" event={"ID":"8b8904bf-3086-4a57-b18c-2f113621fc14","Type":"ContainerStarted","Data":"629ec8028ea7ee132e7cb4e17f5859f72795530f851a1cbf73dc14969d633120"} Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.559280 4911 generic.go:334] "Generic (PLEG): container finished" podID="cedbd680-e4db-4d32-8a18-ceebd62e89d0" containerID="ecae0cfb0db853faba14a84bade72b105ac55d822b1fd89734875412ba8c13f7" exitCode=0 Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.559407 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" event={"ID":"cedbd680-e4db-4d32-8a18-ceebd62e89d0","Type":"ContainerDied","Data":"ecae0cfb0db853faba14a84bade72b105ac55d822b1fd89734875412ba8c13f7"} Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.559436 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" event={"ID":"cedbd680-e4db-4d32-8a18-ceebd62e89d0","Type":"ContainerStarted","Data":"391306d0d2c08a36f986dc43f3e89715f98ec7ab2ae638d701d8b2425eb37f55"} Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.569366 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f91e2479-870e-4a44-9130-488b55e8e092","Type":"ContainerDied","Data":"a72ffee0585d7396f7bb578298900c55c4c151b776ac712b49b7f5fe7427b1a5"} Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.569662 4911 scope.go:117] "RemoveContainer" containerID="b9fb3df3ffbc235df5668ad681e5c31b5089999c15db5542477dd249a3ccf9ee" Jun 06 09:32:04 crc 
kubenswrapper[4911]: I0606 09:32:04.569402 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.605696 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b6f8f48b-4n9zh" event={"ID":"de868b44-b17e-418a-9194-58d24fc9af42","Type":"ContainerStarted","Data":"d56122564610d000fecbf3e19a2ad90f1d89a0bd6a45975fc3142fd705e352c9"} Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.605749 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b6f8f48b-4n9zh" event={"ID":"de868b44-b17e-418a-9194-58d24fc9af42","Type":"ContainerStarted","Data":"644a9678f18c567cab1d8fe4ef4195edcd0fdc46ba518524a219bde3a5289645"} Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.608238 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" event={"ID":"5bfc6577-537e-444b-aeab-e3f12ef96053","Type":"ContainerStarted","Data":"ad55270d4a33828fe7fec2d75102082bb79befb7419fafa5d3ccd170cd42ed63"} Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.625934 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-public-tls-certs\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.626037 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-internal-tls-certs\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.626079 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-combined-ca-bundle\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.626179 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/72e5a926-1c68-4e9b-9240-44c27d488e36-etc-swift\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.626211 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72e5a926-1c68-4e9b-9240-44c27d488e36-run-httpd\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.626234 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72e5a926-1c68-4e9b-9240-44c27d488e36-log-httpd\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.626667 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-config-data\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.626813 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq8lt\" (UniqueName: \"kubernetes.io/projected/72e5a926-1c68-4e9b-9240-44c27d488e36-kube-api-access-tq8lt\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.627674 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72e5a926-1c68-4e9b-9240-44c27d488e36-run-httpd\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.627982 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72e5a926-1c68-4e9b-9240-44c27d488e36-log-httpd\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.630854 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/72e5a926-1c68-4e9b-9240-44c27d488e36-etc-swift\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.631684 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-config-data\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.642175 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-public-tls-certs\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.647136 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-internal-tls-certs\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.648044 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq8lt\" (UniqueName: \"kubernetes.io/projected/72e5a926-1c68-4e9b-9240-44c27d488e36-kube-api-access-tq8lt\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.649913 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/72e5a926-1c68-4e9b-9240-44c27d488e36-combined-ca-bundle\") pod \"swift-proxy-59f44bc869-n7dl7\" (UID: \"72e5a926-1c68-4e9b-9240-44c27d488e36\") " pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.782987 4911 scope.go:117] "RemoveContainer" containerID="dab7e270f92c34ca1303f50e9cbf73d95d30f8a981e121916336054cc2e5f51d" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.790773 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.840980 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.856269 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.867980 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.877964 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.880726 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.880995 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.881048 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.901945 4911 scope.go:117] "RemoveContainer" containerID="3e4c5a6a86e849da795b3880e47ba39ecdb3f59fe15b674fa1b0c11010d25049" Jun 06 09:32:04 crc kubenswrapper[4911]: I0606 09:32:04.950263 4911 scope.go:117] "RemoveContainer" containerID="6209ea7a5d34331eca6041ec96e993a32b803cf0c033bdcf34ad0f99b821bbe2" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.041803 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-scripts\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.041864 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-config-data\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.042063 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.042237 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-log-httpd\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 
09:32:05.042280 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kww9p\" (UniqueName: \"kubernetes.io/projected/80e5526c-70a7-44bb-9762-23caef81912d-kube-api-access-kww9p\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.042362 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-run-httpd\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.042387 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.144753 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-scripts\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.144893 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-config-data\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.144972 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.145030 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-log-httpd\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.145065 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kww9p\" (UniqueName: \"kubernetes.io/projected/80e5526c-70a7-44bb-9762-23caef81912d-kube-api-access-kww9p\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.145126 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-run-httpd\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.145147 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " 
pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.149579 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-log-httpd\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.149661 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-run-httpd\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.153824 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.154782 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-config-data\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.159498 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-scripts\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.159513 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.166833 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kww9p\" (UniqueName: \"kubernetes.io/projected/80e5526c-70a7-44bb-9762-23caef81912d-kube-api-access-kww9p\") pod \"ceilometer-0\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.244678 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.545281 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-59f44bc869-n7dl7"] Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.622242 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b6f8f48b-4n9zh" event={"ID":"de868b44-b17e-418a-9194-58d24fc9af42","Type":"ContainerStarted","Data":"e5b37d525ddcec754daf792e9f0afaaeb17fe744728feffb749677d43f2d9609"} Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.622722 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.625819 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" event={"ID":"cedbd680-e4db-4d32-8a18-ceebd62e89d0","Type":"ContainerStarted","Data":"d61c535edd57ba20ed2eea5f679d6a1ddb8f32216cab438c129405747dd0d7d0"} Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.626016 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.648946 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-58b6f8f48b-4n9zh" podStartSLOduration=3.648925517 podStartE2EDuration="3.648925517s" podCreationTimestamp="2025-06-06 09:32:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:05.640928343 +0000 UTC m=+1136.916353896" watchObservedRunningTime="2025-06-06 09:32:05.648925517 +0000 UTC m=+1136.924351060" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.678159 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" podStartSLOduration=3.678137275 podStartE2EDuration="3.678137275s" podCreationTimestamp="2025-06-06 09:32:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:05.665389879 +0000 UTC m=+1136.940815442" watchObservedRunningTime="2025-06-06 09:32:05.678137275 +0000 UTC m=+1136.953562818" Jun 06 09:32:05 crc kubenswrapper[4911]: I0606 09:32:05.961598 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f91e2479-870e-4a44-9130-488b55e8e092" path="/var/lib/kubelet/pods/f91e2479-870e-4a44-9130-488b55e8e092/volumes" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.286021 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-54cd458d76-j6txj"] Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.287769 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.293773 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.293913 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.302392 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54cd458d76-j6txj"] Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.376354 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-internal-tls-certs\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.376795 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-config-data-custom\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.376869 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-config-data\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.376891 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/201d1114-599e-4139-99e5-29e5cd900b81-logs\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.376918 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-public-tls-certs\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.376999 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-combined-ca-bundle\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.377117 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjkkw\" (UniqueName: \"kubernetes.io/projected/201d1114-599e-4139-99e5-29e5cd900b81-kube-api-access-vjkkw\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.482261 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-internal-tls-certs\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.482354 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-config-data-custom\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.482394 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-config-data\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.482415 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/201d1114-599e-4139-99e5-29e5cd900b81-logs\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.482441 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-public-tls-certs\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.482511 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-combined-ca-bundle\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.482537 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjkkw\" (UniqueName: \"kubernetes.io/projected/201d1114-599e-4139-99e5-29e5cd900b81-kube-api-access-vjkkw\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.483895 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/201d1114-599e-4139-99e5-29e5cd900b81-logs\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.496670 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-combined-ca-bundle\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.496976 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-internal-tls-certs\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.499265 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-config-data\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.502149 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-config-data-custom\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.503676 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/201d1114-599e-4139-99e5-29e5cd900b81-public-tls-certs\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.525034 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjkkw\" (UniqueName: \"kubernetes.io/projected/201d1114-599e-4139-99e5-29e5cd900b81-kube-api-access-vjkkw\") pod \"barbican-api-54cd458d76-j6txj\" (UID: \"201d1114-599e-4139-99e5-29e5cd900b81\") " pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.650926 4911 generic.go:334] "Generic (PLEG): container finished" podID="cd2440f6-658c-4edb-938c-b40f2d3f7cf4" containerID="a153e854a31def2185439edccebff0910042704552a20e9b6fb22051e5e2a96f" exitCode=0 Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.651328 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-q7cj8" event={"ID":"cd2440f6-658c-4edb-938c-b40f2d3f7cf4","Type":"ContainerDied","Data":"a153e854a31def2185439edccebff0910042704552a20e9b6fb22051e5e2a96f"} Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.675673 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.684629 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-59f44bc869-n7dl7" event={"ID":"72e5a926-1c68-4e9b-9240-44c27d488e36","Type":"ContainerStarted","Data":"d020e0f620da2953e4b73b6025449771c472b30e51c187b9e665ebb7fbc25d94"} Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.685428 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:06 crc kubenswrapper[4911]: I0606 09:32:06.989837 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.207365 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54cd458d76-j6txj"] Jun 06 09:32:07 crc kubenswrapper[4911]: W0606 09:32:07.235407 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod201d1114_599e_4139_99e5_29e5cd900b81.slice/crio-114d42b775cacade3ea9f6a72ec57168c1d6117f1920554e7c17db8cd76ba94a WatchSource:0}: Error finding container 114d42b775cacade3ea9f6a72ec57168c1d6117f1920554e7c17db8cd76ba94a: Status 404 returned error can't find the container with id 114d42b775cacade3ea9f6a72ec57168c1d6117f1920554e7c17db8cd76ba94a Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.696582 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-59f44bc869-n7dl7" event={"ID":"72e5a926-1c68-4e9b-9240-44c27d488e36","Type":"ContainerStarted","Data":"3240e3cdc43d85677bec13bfa6bbf622bfc7d3e1749326318499076be1aaaa44"} Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.696958 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-59f44bc869-n7dl7" event={"ID":"72e5a926-1c68-4e9b-9240-44c27d488e36","Type":"ContainerStarted","Data":"50f05181c90e62c35be7c5ec7de1a076bedf27fbf0ae4893237220ef01e3764a"} Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.697123 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.697148 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.699339 4911 generic.go:334] "Generic (PLEG): container finished" podID="2ac55e69-d8fc-414b-add4-1d60dfcee487" containerID="bd1af89fbfa306f687233c3fcb8a5174c0ecdb8f42c26ffc0255ea9c17ecf8e9" exitCode=0 Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.699462 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4j5dr" event={"ID":"2ac55e69-d8fc-414b-add4-1d60dfcee487","Type":"ContainerDied","Data":"bd1af89fbfa306f687233c3fcb8a5174c0ecdb8f42c26ffc0255ea9c17ecf8e9"} Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.703124 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" event={"ID":"5bfc6577-537e-444b-aeab-e3f12ef96053","Type":"ContainerStarted","Data":"471a1da6bb82c536b6f4f11598d870b9733e348ebcfea81983d9930f9c930841"} Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.703165 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" 
event={"ID":"5bfc6577-537e-444b-aeab-e3f12ef96053","Type":"ContainerStarted","Data":"3e4d53fcd35eb4cc54f42b2bdb076bd565d7179f0417b4c02831043f7a56950a"} Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.707127 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b8576d7c9-bckww" event={"ID":"8b8904bf-3086-4a57-b18c-2f113621fc14","Type":"ContainerStarted","Data":"6b8392ed555442acdd069f6c51c920ada7607e108b8d1aa43a645a709171c729"} Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.707173 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b8576d7c9-bckww" event={"ID":"8b8904bf-3086-4a57-b18c-2f113621fc14","Type":"ContainerStarted","Data":"db496eddacbf7f37087a5c5c61f994097bd7928d54cd72157ead06f1ba85d2bf"} Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.711987 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54cd458d76-j6txj" event={"ID":"201d1114-599e-4139-99e5-29e5cd900b81","Type":"ContainerStarted","Data":"c47ee9a8e20de3e305255253d653a5d4b4b1b9190e3e34fe31e95eb24e69811f"} Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.712033 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54cd458d76-j6txj" event={"ID":"201d1114-599e-4139-99e5-29e5cd900b81","Type":"ContainerStarted","Data":"114d42b775cacade3ea9f6a72ec57168c1d6117f1920554e7c17db8cd76ba94a"} Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.713800 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80e5526c-70a7-44bb-9762-23caef81912d","Type":"ContainerStarted","Data":"1949c9dee80204695cb600c725efc2e743413036dfe5fb9970cef42ddf60ee18"} Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.727971 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-59f44bc869-n7dl7" podStartSLOduration=3.727951393 podStartE2EDuration="3.727951393s" podCreationTimestamp="2025-06-06 09:32:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:07.72784528 +0000 UTC m=+1139.003270833" watchObservedRunningTime="2025-06-06 09:32:07.727951393 +0000 UTC m=+1139.003376936" Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.768815 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-64cdf9799b-mrpz2" podStartSLOduration=2.942220991 podStartE2EDuration="5.768794108s" podCreationTimestamp="2025-06-06 09:32:02 +0000 UTC" firstStartedPulling="2025-06-06 09:32:03.614596785 +0000 UTC m=+1134.890022328" lastFinishedPulling="2025-06-06 09:32:06.441169892 +0000 UTC m=+1137.716595445" observedRunningTime="2025-06-06 09:32:07.765876463 +0000 UTC m=+1139.041302016" watchObservedRunningTime="2025-06-06 09:32:07.768794108 +0000 UTC m=+1139.044219651" Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.809573 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6b8576d7c9-bckww" podStartSLOduration=3.198708505 podStartE2EDuration="5.809547801s" podCreationTimestamp="2025-06-06 09:32:02 +0000 UTC" firstStartedPulling="2025-06-06 09:32:03.821929401 +0000 UTC m=+1135.097354944" lastFinishedPulling="2025-06-06 09:32:06.432768697 +0000 UTC m=+1137.708194240" observedRunningTime="2025-06-06 09:32:07.789352214 +0000 UTC m=+1139.064777777" watchObservedRunningTime="2025-06-06 09:32:07.809547801 +0000 UTC 
m=+1139.084973344" Jun 06 09:32:07 crc kubenswrapper[4911]: I0606 09:32:07.964121 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.400867 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-q7cj8" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.534819 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mc6p\" (UniqueName: \"kubernetes.io/projected/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-kube-api-access-4mc6p\") pod \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.535244 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-config-data\") pod \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.535400 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-combined-ca-bundle\") pod \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.535477 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-job-config-data\") pod \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\" (UID: \"cd2440f6-658c-4edb-938c-b40f2d3f7cf4\") " Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.549225 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "cd2440f6-658c-4edb-938c-b40f2d3f7cf4" (UID: "cd2440f6-658c-4edb-938c-b40f2d3f7cf4"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.551313 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-kube-api-access-4mc6p" (OuterVolumeSpecName: "kube-api-access-4mc6p") pod "cd2440f6-658c-4edb-938c-b40f2d3f7cf4" (UID: "cd2440f6-658c-4edb-938c-b40f2d3f7cf4"). InnerVolumeSpecName "kube-api-access-4mc6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.578262 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-config-data" (OuterVolumeSpecName: "config-data") pod "cd2440f6-658c-4edb-938c-b40f2d3f7cf4" (UID: "cd2440f6-658c-4edb-938c-b40f2d3f7cf4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.645815 4911 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-job-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.646193 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mc6p\" (UniqueName: \"kubernetes.io/projected/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-kube-api-access-4mc6p\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.646293 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.703272 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd2440f6-658c-4edb-938c-b40f2d3f7cf4" (UID: "cd2440f6-658c-4edb-938c-b40f2d3f7cf4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.732343 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-q7cj8" event={"ID":"cd2440f6-658c-4edb-938c-b40f2d3f7cf4","Type":"ContainerDied","Data":"b7e9943c0383cd953804addd6b91c6374c6812183fe04edbcd645375e754fda9"} Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.732389 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7e9943c0383cd953804addd6b91c6374c6812183fe04edbcd645375e754fda9" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.732456 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-q7cj8" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.744423 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54cd458d76-j6txj" event={"ID":"201d1114-599e-4139-99e5-29e5cd900b81","Type":"ContainerStarted","Data":"3899599533d37eaafef2a8429dd6baa0eeba50b202d29f71896a585f6bab36fc"} Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.745998 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.746506 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.747522 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2440f6-658c-4edb-938c-b40f2d3f7cf4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.751881 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80e5526c-70a7-44bb-9762-23caef81912d","Type":"ContainerStarted","Data":"48956eedc37ed9b718ba75a7fe9ac3bdaf030ebff4bac6f41e11e743071d3ba6"} Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.776597 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-54cd458d76-j6txj" podStartSLOduration=2.776575879 podStartE2EDuration="2.776575879s" podCreationTimestamp="2025-06-06 09:32:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:08.775383089 +0000 UTC m=+1140.050808632" watchObservedRunningTime="2025-06-06 09:32:08.776575879 +0000 UTC m=+1140.052001422" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.908787 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Jun 06 09:32:08 crc kubenswrapper[4911]: E0606 09:32:08.909506 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2440f6-658c-4edb-938c-b40f2d3f7cf4" containerName="manila-db-sync" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.909524 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2440f6-658c-4edb-938c-b40f2d3f7cf4" containerName="manila-db-sync" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.909716 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd2440f6-658c-4edb-938c-b40f2d3f7cf4" containerName="manila-db-sync" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.910663 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.915329 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.915329 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.915543 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.921354 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Jun 06 09:32:08 crc kubenswrapper[4911]: I0606 09:32:08.923415 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-pffs5" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.014140 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.023777 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.033600 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.054015 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-scripts\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.054129 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.054194 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-localtime\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.054446 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.054520 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c99n\" (UniqueName: \"kubernetes.io/projected/31189f8d-bd76-4b64-84d2-193881b6b47a-kube-api-access-9c99n\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.054552 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.054577 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.056069 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.094952 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd77fdfb5-lr9bf"] Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.095471 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" podUID="cedbd680-e4db-4d32-8a18-ceebd62e89d0" containerName="dnsmasq-dns" containerID="cri-o://d61c535edd57ba20ed2eea5f679d6a1ddb8f32216cab438c129405747dd0d7d0" gracePeriod=10 Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.144487 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-647c58c75-chstb"] Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.146700 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.156585 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-scripts\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.156712 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.156763 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.156806 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-localtime\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.156865 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-ceph\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc 
kubenswrapper[4911]: I0606 09:32:09.156901 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.156944 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.156986 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c99n\" (UniqueName: \"kubernetes.io/projected/31189f8d-bd76-4b64-84d2-193881b6b47a-kube-api-access-9c99n\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.157022 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-689tk\" (UniqueName: \"kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-kube-api-access-689tk\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.157060 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.157111 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.157162 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-scripts\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.157198 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.157256 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.157314 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.157354 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-localtime\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.157642 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.157671 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-localtime\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.163213 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-scripts\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.163299 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.163315 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.163582 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.166655 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-647c58c75-chstb"] Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.185384 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c99n\" (UniqueName: \"kubernetes.io/projected/31189f8d-bd76-4b64-84d2-193881b6b47a-kube-api-access-9c99n\") pod \"manila-scheduler-0\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.236978 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.259657 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.259715 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-nb\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.259771 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-scripts\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.259831 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-config\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.259862 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.259887 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ps2z\" (UniqueName: \"kubernetes.io/projected/c6b111da-d2fe-4bed-952d-b64183bc5442-kube-api-access-5ps2z\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.259918 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-svc\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.259943 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.259973 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-localtime\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc 
kubenswrapper[4911]: I0606 09:32:09.259989 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-ceph\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.260010 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-sb\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.260038 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.260066 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-689tk\" (UniqueName: \"kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-kube-api-access-689tk\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.260127 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.260336 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.260379 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-localtime\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.260150 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-swift-storage-0\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.263585 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.264280 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" 
(UniqueName: \"kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-ceph\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.265772 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-scripts\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.270348 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.272737 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.280248 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.288041 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-689tk\" (UniqueName: \"kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-kube-api-access-689tk\") pod \"manila-share-share1-0\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.336144 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.349156 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.359345 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.362837 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.363144 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-nb\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.363267 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-config\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.363307 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ps2z\" (UniqueName: \"kubernetes.io/projected/c6b111da-d2fe-4bed-952d-b64183bc5442-kube-api-access-5ps2z\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.363338 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-svc\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.363386 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-sb\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.363474 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-swift-storage-0\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.364483 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-config\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.365740 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-svc\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.366132 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-sb\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.369279 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-nb\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.372628 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-swift-storage-0\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.385234 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.404425 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ps2z\" (UniqueName: \"kubernetes.io/projected/c6b111da-d2fe-4bed-952d-b64183bc5442-kube-api-access-5ps2z\") pod \"dnsmasq-dns-647c58c75-chstb\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.464956 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-localtime\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.465036 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.465055 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data-custom\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.465249 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/609b09fd-87cd-49be-b7ad-ddac6458a02c-logs\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.465276 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.465302 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-machine-id\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.465324 4911 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-scripts\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.465344 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rcdv\" (UniqueName: \"kubernetes.io/projected/609b09fd-87cd-49be-b7ad-ddac6458a02c-kube-api-access-6rcdv\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.530326 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.567399 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.567505 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data-custom\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.567637 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/609b09fd-87cd-49be-b7ad-ddac6458a02c-logs\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.567674 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.567706 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-machine-id\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.567736 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-scripts\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.567757 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rcdv\" (UniqueName: \"kubernetes.io/projected/609b09fd-87cd-49be-b7ad-ddac6458a02c-kube-api-access-6rcdv\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.568933 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/609b09fd-87cd-49be-b7ad-ddac6458a02c-logs\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.572289 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-machine-id\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.572292 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-localtime\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.572340 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-localtime\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.574930 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.575080 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.575497 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data-custom\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.583858 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-scripts\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.607646 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rcdv\" (UniqueName: \"kubernetes.io/projected/609b09fd-87cd-49be-b7ad-ddac6458a02c-kube-api-access-6rcdv\") pod \"manila-api-0\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.750386 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.778376 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4j5dr" event={"ID":"2ac55e69-d8fc-414b-add4-1d60dfcee487","Type":"ContainerDied","Data":"f947066bc5bb3708c2e3cc9c3d665d10c85fc6b7ca2ef80dbab8505a930b9089"} Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.778460 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f947066bc5bb3708c2e3cc9c3d665d10c85fc6b7ca2ef80dbab8505a930b9089" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.804288 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.805308 4911 generic.go:334] "Generic (PLEG): container finished" podID="cedbd680-e4db-4d32-8a18-ceebd62e89d0" containerID="d61c535edd57ba20ed2eea5f679d6a1ddb8f32216cab438c129405747dd0d7d0" exitCode=0 Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.805684 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" event={"ID":"cedbd680-e4db-4d32-8a18-ceebd62e89d0","Type":"ContainerDied","Data":"d61c535edd57ba20ed2eea5f679d6a1ddb8f32216cab438c129405747dd0d7d0"} Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.983830 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-machine-id\") pod \"2ac55e69-d8fc-414b-add4-1d60dfcee487\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.983933 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-scripts\") pod \"2ac55e69-d8fc-414b-add4-1d60dfcee487\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.984002 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-combined-ca-bundle\") pod \"2ac55e69-d8fc-414b-add4-1d60dfcee487\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.984028 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-config-data\") pod \"2ac55e69-d8fc-414b-add4-1d60dfcee487\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.984116 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-localtime\") pod \"2ac55e69-d8fc-414b-add4-1d60dfcee487\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.984164 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-755vx\" (UniqueName: \"kubernetes.io/projected/2ac55e69-d8fc-414b-add4-1d60dfcee487-kube-api-access-755vx\") pod \"2ac55e69-d8fc-414b-add4-1d60dfcee487\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.984232 4911 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-db-sync-config-data\") pod \"2ac55e69-d8fc-414b-add4-1d60dfcee487\" (UID: \"2ac55e69-d8fc-414b-add4-1d60dfcee487\") " Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.984729 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-localtime" (OuterVolumeSpecName: "etc-localtime") pod "2ac55e69-d8fc-414b-add4-1d60dfcee487" (UID: "2ac55e69-d8fc-414b-add4-1d60dfcee487"). InnerVolumeSpecName "etc-localtime". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.984776 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2ac55e69-d8fc-414b-add4-1d60dfcee487" (UID: "2ac55e69-d8fc-414b-add4-1d60dfcee487"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.997105 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-scripts" (OuterVolumeSpecName: "scripts") pod "2ac55e69-d8fc-414b-add4-1d60dfcee487" (UID: "2ac55e69-d8fc-414b-add4-1d60dfcee487"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:09 crc kubenswrapper[4911]: I0606 09:32:09.997200 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2ac55e69-d8fc-414b-add4-1d60dfcee487" (UID: "2ac55e69-d8fc-414b-add4-1d60dfcee487"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.000215 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ac55e69-d8fc-414b-add4-1d60dfcee487-kube-api-access-755vx" (OuterVolumeSpecName: "kube-api-access-755vx") pod "2ac55e69-d8fc-414b-add4-1d60dfcee487" (UID: "2ac55e69-d8fc-414b-add4-1d60dfcee487"). InnerVolumeSpecName "kube-api-access-755vx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.052331 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ac55e69-d8fc-414b-add4-1d60dfcee487" (UID: "2ac55e69-d8fc-414b-add4-1d60dfcee487"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.089654 4911 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.090307 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.090404 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.090458 4911 reconciler_common.go:293] "Volume detached for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/2ac55e69-d8fc-414b-add4-1d60dfcee487-etc-localtime\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.090509 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-755vx\" (UniqueName: \"kubernetes.io/projected/2ac55e69-d8fc-414b-add4-1d60dfcee487-kube-api-access-755vx\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.090566 4911 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.163377 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-config-data" (OuterVolumeSpecName: "config-data") pod "2ac55e69-d8fc-414b-add4-1d60dfcee487" (UID: "2ac55e69-d8fc-414b-add4-1d60dfcee487"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.176319 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.193123 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ac55e69-d8fc-414b-add4-1d60dfcee487-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.548893 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.571194 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-647c58c75-chstb"] Jun 06 09:32:10 crc kubenswrapper[4911]: W0606 09:32:10.580975 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6b111da_d2fe_4bed_952d_b64183bc5442.slice/crio-6482ede861fa1c80846d90576dfe91b4dfe1e226e99c710e88d88dacfc9b5c81 WatchSource:0}: Error finding container 6482ede861fa1c80846d90576dfe91b4dfe1e226e99c710e88d88dacfc9b5c81: Status 404 returned error can't find the container with id 6482ede861fa1c80846d90576dfe91b4dfe1e226e99c710e88d88dacfc9b5c81 Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.820011 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"074afac4-64d1-4af0-a4fc-82e118a23756","Type":"ContainerStarted","Data":"d9d567b5cd0c5deb2e1d0cbdad1d649ca45f3548268d09a6edfb3979acbf38b9"} Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.826060 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647c58c75-chstb" event={"ID":"c6b111da-d2fe-4bed-952d-b64183bc5442","Type":"ContainerStarted","Data":"6482ede861fa1c80846d90576dfe91b4dfe1e226e99c710e88d88dacfc9b5c81"} Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.828223 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"31189f8d-bd76-4b64-84d2-193881b6b47a","Type":"ContainerStarted","Data":"79d7806c90c2a35d7dedfaca2c51a668cef002f327543a456b149cc4db6f08cd"} Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.828296 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-4j5dr" Jun 06 09:32:10 crc kubenswrapper[4911]: I0606 09:32:10.949353 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.199958 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.255863 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-647c58c75-chstb"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.257215 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmv9x\" (UniqueName: \"kubernetes.io/projected/cedbd680-e4db-4d32-8a18-ceebd62e89d0-kube-api-access-gmv9x\") pod \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.257317 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-sb\") pod \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.257396 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-config\") pod \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.257455 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-svc\") pod \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.257566 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-nb\") pod \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.257598 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-swift-storage-0\") pod \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\" (UID: \"cedbd680-e4db-4d32-8a18-ceebd62e89d0\") " Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.281692 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cedbd680-e4db-4d32-8a18-ceebd62e89d0-kube-api-access-gmv9x" (OuterVolumeSpecName: "kube-api-access-gmv9x") pod "cedbd680-e4db-4d32-8a18-ceebd62e89d0" (UID: "cedbd680-e4db-4d32-8a18-ceebd62e89d0"). InnerVolumeSpecName "kube-api-access-gmv9x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.283981 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jun 06 09:32:11 crc kubenswrapper[4911]: E0606 09:32:11.284579 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cedbd680-e4db-4d32-8a18-ceebd62e89d0" containerName="init" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.284599 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="cedbd680-e4db-4d32-8a18-ceebd62e89d0" containerName="init" Jun 06 09:32:11 crc kubenswrapper[4911]: E0606 09:32:11.284615 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cedbd680-e4db-4d32-8a18-ceebd62e89d0" containerName="dnsmasq-dns" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.284623 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="cedbd680-e4db-4d32-8a18-ceebd62e89d0" containerName="dnsmasq-dns" Jun 06 09:32:11 crc kubenswrapper[4911]: E0606 09:32:11.284642 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ac55e69-d8fc-414b-add4-1d60dfcee487" containerName="cinder-db-sync" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.284650 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ac55e69-d8fc-414b-add4-1d60dfcee487" containerName="cinder-db-sync" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.285254 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ac55e69-d8fc-414b-add4-1d60dfcee487" containerName="cinder-db-sync" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.285279 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="cedbd680-e4db-4d32-8a18-ceebd62e89d0" containerName="dnsmasq-dns" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.288338 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.294275 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.294659 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-pb46k" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.294817 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.308968 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.354137 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cedbd680-e4db-4d32-8a18-ceebd62e89d0" (UID: "cedbd680-e4db-4d32-8a18-ceebd62e89d0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.355160 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cedbd680-e4db-4d32-8a18-ceebd62e89d0" (UID: "cedbd680-e4db-4d32-8a18-ceebd62e89d0"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.357642 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cedbd680-e4db-4d32-8a18-ceebd62e89d0" (UID: "cedbd680-e4db-4d32-8a18-ceebd62e89d0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.366017 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-scripts\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.366078 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.366130 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.366187 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.366223 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-localtime\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.366246 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.366266 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c422\" (UniqueName: \"kubernetes.io/projected/9cd217ab-c7bb-4850-9cce-be28e294667c-kube-api-access-2c422\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.366488 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.366525 4911 reconciler_common.go:293] 
"Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.366538 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.366550 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmv9x\" (UniqueName: \"kubernetes.io/projected/cedbd680-e4db-4d32-8a18-ceebd62e89d0-kube-api-access-gmv9x\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.367562 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-config" (OuterVolumeSpecName: "config") pod "cedbd680-e4db-4d32-8a18-ceebd62e89d0" (UID: "cedbd680-e4db-4d32-8a18-ceebd62e89d0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.378977 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b957c86d9-z2kmd"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.381119 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.386776 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.426776 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b957c86d9-z2kmd"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.443736 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "cedbd680-e4db-4d32-8a18-ceebd62e89d0" (UID: "cedbd680-e4db-4d32-8a18-ceebd62e89d0"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473304 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-swift-storage-0\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473377 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473404 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-svc\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473431 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-nb\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473457 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2w9n\" (UniqueName: \"kubernetes.io/projected/522d16af-0c42-407e-8a26-115443d6e0fa-kube-api-access-q2w9n\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473488 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473523 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-localtime\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473547 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473564 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c422\" (UniqueName: \"kubernetes.io/projected/9cd217ab-c7bb-4850-9cce-be28e294667c-kube-api-access-2c422\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 
06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473599 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-scripts\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473620 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-config\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473671 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-sb\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473692 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473736 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.473747 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cedbd680-e4db-4d32-8a18-ceebd62e89d0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.478871 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.484306 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.484407 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.484435 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-localtime\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 
09:32:11.487497 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.489721 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.489840 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.493671 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.493683 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-scripts\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.507676 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c422\" (UniqueName: \"kubernetes.io/projected/9cd217ab-c7bb-4850-9cce-be28e294667c-kube-api-access-2c422\") pod \"cinder-scheduler-0\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.508717 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.513403 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.515823 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.518360 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.527179 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.586518 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-config\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.586586 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-sb\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.586662 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-swift-storage-0\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.586715 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-svc\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.586770 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-nb\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.586815 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2w9n\" (UniqueName: \"kubernetes.io/projected/522d16af-0c42-407e-8a26-115443d6e0fa-kube-api-access-q2w9n\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.587873 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-config\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.587910 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-sb\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: 
\"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.587910 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-swift-storage-0\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.589424 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-svc\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.589888 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-nb\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.598417 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.600146 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.604571 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.609042 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2w9n\" (UniqueName: \"kubernetes.io/projected/522d16af-0c42-407e-8a26-115443d6e0fa-kube-api-access-q2w9n\") pod \"dnsmasq-dns-6b957c86d9-z2kmd\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.610367 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.656581 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.688167 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.688216 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-run\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.688246 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-localtime\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.688266 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlgvc\" (UniqueName: \"kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-kube-api-access-nlgvc\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.688284 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-dev\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.688298 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.689812 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-lib-modules\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.689913 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-scripts\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.689949 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: 
I0606 09:32:11.689976 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690012 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690042 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-ceph\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690080 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690185 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690255 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690300 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690338 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690391 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-nvme\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690439 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690493 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690523 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-run\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690575 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-dev\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690593 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-sys\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690622 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-sys\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690660 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690691 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690720 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690743 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-localtime\") pod 
\"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690776 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690805 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data-custom\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690829 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690852 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsnpb\" (UniqueName: \"kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-kube-api-access-jsnpb\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690872 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.690918 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.727220 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.793289 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796248 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796301 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796340 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-ceph\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796388 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796437 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796485 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796525 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796551 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796579 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rs8r\" 
(UniqueName: \"kubernetes.io/projected/0f5cb979-1d90-4898-a6f5-9da614504640-kube-api-access-9rs8r\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796630 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-nvme\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796651 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796706 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796759 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796793 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-run\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796874 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-dev\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796895 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-sys\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796927 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-sys\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796957 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f5cb979-1d90-4898-a6f5-9da614504640-logs\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796997 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797002 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797028 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797079 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797127 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797177 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797202 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-localtime\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797241 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797277 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-scripts\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797314 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " 
pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797349 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data-custom\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797383 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797406 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsnpb\" (UniqueName: \"kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-kube-api-access-jsnpb\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797452 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797529 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797568 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data-custom\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797648 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797676 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-localtime\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797710 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-run\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797743 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: 
\"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-nvme\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797756 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-localtime\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797799 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-localtime\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.796158 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797824 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlgvc\" (UniqueName: \"kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-kube-api-access-nlgvc\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797854 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-dev\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797877 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797879 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797923 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-lib-modules\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797954 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-scripts\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.798150 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.799171 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-dev\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.799224 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-sys\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.799255 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-sys\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.799371 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.800976 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.802179 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.803062 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-run\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.803141 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-lib-modules\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797567 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-run\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.803441 4911 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-localtime\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.803499 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.803546 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.803625 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.803695 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-dev\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.797855 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.804598 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.804704 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.806516 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.806710 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data-custom\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.808979 4911 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-scripts\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.811536 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-ceph\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.811984 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.813409 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.819213 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.820845 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.823788 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsnpb\" (UniqueName: \"kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-kube-api-access-jsnpb\") pod \"cinder-volume-volume1-0\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.824911 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlgvc\" (UniqueName: \"kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-kube-api-access-nlgvc\") pod \"cinder-backup-0\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.842103 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" event={"ID":"cedbd680-e4db-4d32-8a18-ceebd62e89d0","Type":"ContainerDied","Data":"391306d0d2c08a36f986dc43f3e89715f98ec7ab2ae638d701d8b2425eb37f55"} Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.842166 4911 scope.go:117] "RemoveContainer" containerID="d61c535edd57ba20ed2eea5f679d6a1ddb8f32216cab438c129405747dd0d7d0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.842405 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd77fdfb5-lr9bf" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.863630 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80e5526c-70a7-44bb-9762-23caef81912d","Type":"ContainerStarted","Data":"f22ac509253f2fc580f47a38b62dfa390bddb856f17dce57068a38f8f30ac1fa"} Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.864122 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.869301 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"609b09fd-87cd-49be-b7ad-ddac6458a02c","Type":"ContainerStarted","Data":"3c261120b57fef843ea8e3bd708e3b3adf369f8ff1d4c797c07ffb06ee1949ca"} Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.877185 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647c58c75-chstb" event={"ID":"c6b111da-d2fe-4bed-952d-b64183bc5442","Type":"ContainerStarted","Data":"f8894f7712324d881631084632b13a2d31b558b34a3060efbb849f02ef862ada"} Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.877355 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-647c58c75-chstb" podUID="c6b111da-d2fe-4bed-952d-b64183bc5442" containerName="init" containerID="cri-o://f8894f7712324d881631084632b13a2d31b558b34a3060efbb849f02ef862ada" gracePeriod=10 Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.884532 4911 scope.go:117] "RemoveContainer" containerID="ecae0cfb0db853faba14a84bade72b105ac55d822b1fd89734875412ba8c13f7" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.903458 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rs8r\" (UniqueName: \"kubernetes.io/projected/0f5cb979-1d90-4898-a6f5-9da614504640-kube-api-access-9rs8r\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.903528 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.903595 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f5cb979-1d90-4898-a6f5-9da614504640-logs\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.903628 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.903744 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.903777 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-scripts\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.903850 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data-custom\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.903889 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-localtime\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.905122 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd77fdfb5-lr9bf"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.905221 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-localtime\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.905264 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.906221 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f5cb979-1d90-4898-a6f5-9da614504640-logs\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.909331 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-scripts\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.909627 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.910004 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data-custom\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.910510 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc 
kubenswrapper[4911]: I0606 09:32:11.913688 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cd77fdfb5-lr9bf"] Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.923372 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rs8r\" (UniqueName: \"kubernetes.io/projected/0f5cb979-1d90-4898-a6f5-9da614504640-kube-api-access-9rs8r\") pod \"cinder-api-0\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " pod="openstack/cinder-api-0" Jun 06 09:32:11 crc kubenswrapper[4911]: I0606 09:32:11.975336 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cedbd680-e4db-4d32-8a18-ceebd62e89d0" path="/var/lib/kubelet/pods/cedbd680-e4db-4d32-8a18-ceebd62e89d0/volumes" Jun 06 09:32:12 crc kubenswrapper[4911]: I0606 09:32:12.116783 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:12 crc kubenswrapper[4911]: I0606 09:32:12.177109 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jun 06 09:32:12 crc kubenswrapper[4911]: I0606 09:32:12.186067 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jun 06 09:32:12 crc kubenswrapper[4911]: I0606 09:32:12.477740 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b957c86d9-z2kmd"] Jun 06 09:32:12 crc kubenswrapper[4911]: W0606 09:32:12.626852 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod522d16af_0c42_407e_8a26_115443d6e0fa.slice/crio-6446a915015b1790f3521e496b77c961a6416ce8e8c9b525998eb979db6f8437 WatchSource:0}: Error finding container 6446a915015b1790f3521e496b77c961a6416ce8e8c9b525998eb979db6f8437: Status 404 returned error can't find the container with id 6446a915015b1790f3521e496b77c961a6416ce8e8c9b525998eb979db6f8437 Jun 06 09:32:12 crc kubenswrapper[4911]: I0606 09:32:12.802304 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Jun 06 09:32:12 crc kubenswrapper[4911]: W0606 09:32:12.865942 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf60f5eac_7f8c_4909_ae10_449d6e6df432.slice/crio-48e7076c053c889bee684076408ceb20232c4c7edfd074615be9282fa137e2a7 WatchSource:0}: Error finding container 48e7076c053c889bee684076408ceb20232c4c7edfd074615be9282fa137e2a7: Status 404 returned error can't find the container with id 48e7076c053c889bee684076408ceb20232c4c7edfd074615be9282fa137e2a7 Jun 06 09:32:12 crc kubenswrapper[4911]: I0606 09:32:12.930416 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" event={"ID":"522d16af-0c42-407e-8a26-115443d6e0fa","Type":"ContainerStarted","Data":"6446a915015b1790f3521e496b77c961a6416ce8e8c9b525998eb979db6f8437"} Jun 06 09:32:12 crc kubenswrapper[4911]: I0606 09:32:12.945666 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"609b09fd-87cd-49be-b7ad-ddac6458a02c","Type":"ContainerStarted","Data":"aec071b040ce5eb4ac34984bbd213f9555a0a072b956e5249bd255911bdbf4bf"} Jun 06 09:32:12 crc kubenswrapper[4911]: I0606 09:32:12.954382 4911 generic.go:334] "Generic (PLEG): container finished" podID="c6b111da-d2fe-4bed-952d-b64183bc5442" containerID="f8894f7712324d881631084632b13a2d31b558b34a3060efbb849f02ef862ada" exitCode=0 Jun 06 09:32:12 crc 
kubenswrapper[4911]: I0606 09:32:12.954487 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647c58c75-chstb" event={"ID":"c6b111da-d2fe-4bed-952d-b64183bc5442","Type":"ContainerDied","Data":"f8894f7712324d881631084632b13a2d31b558b34a3060efbb849f02ef862ada"} Jun 06 09:32:12 crc kubenswrapper[4911]: I0606 09:32:12.955824 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9cd217ab-c7bb-4850-9cce-be28e294667c","Type":"ContainerStarted","Data":"735a3708ec2d2f875682741bc3e6dba6df6e82dd68a14d29734f10afc0c9a280"} Jun 06 09:32:13 crc kubenswrapper[4911]: I0606 09:32:13.250585 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jun 06 09:32:13 crc kubenswrapper[4911]: I0606 09:32:13.318986 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.000635 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"695d811f-7aed-428d-b224-e6711323e54b","Type":"ContainerStarted","Data":"bea911b5f4bf9f55bf02d91e9302f03b954adce42a2ec88b2a2950f4703c14e6"} Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.017777 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"609b09fd-87cd-49be-b7ad-ddac6458a02c","Type":"ContainerStarted","Data":"61d172a50d60da55e4639c793bcda6ffe3f914d275c3ad39a34471e14932f447"} Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.018994 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.034058 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"31189f8d-bd76-4b64-84d2-193881b6b47a","Type":"ContainerStarted","Data":"389a8d1ce8531cd3a04ebb23f6d4d12dd12da9be2a643fce93da2c248b8dbe1e"} Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.057947 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=5.057922079 podStartE2EDuration="5.057922079s" podCreationTimestamp="2025-06-06 09:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:14.056339388 +0000 UTC m=+1145.331764941" watchObservedRunningTime="2025-06-06 09:32:14.057922079 +0000 UTC m=+1145.333347622" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.065120 4911 generic.go:334] "Generic (PLEG): container finished" podID="522d16af-0c42-407e-8a26-115443d6e0fa" containerID="c7627ce5d4a1ded581f26985bd0106a03e7c83defaff24ddcfbb7ecb2c2ffbc9" exitCode=0 Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.065244 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" event={"ID":"522d16af-0c42-407e-8a26-115443d6e0fa","Type":"ContainerDied","Data":"c7627ce5d4a1ded581f26985bd0106a03e7c83defaff24ddcfbb7ecb2c2ffbc9"} Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.089720 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"f60f5eac-7f8c-4909-ae10-449d6e6df432","Type":"ContainerStarted","Data":"48e7076c053c889bee684076408ceb20232c4c7edfd074615be9282fa137e2a7"} Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.091390 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"0f5cb979-1d90-4898-a6f5-9da614504640","Type":"ContainerStarted","Data":"00f579f18602d84a5ed0911646099d41cfb08c1e2a2d8b7aeb82d961b0490d46"} Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.132608 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.203281 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ps2z\" (UniqueName: \"kubernetes.io/projected/c6b111da-d2fe-4bed-952d-b64183bc5442-kube-api-access-5ps2z\") pod \"c6b111da-d2fe-4bed-952d-b64183bc5442\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.203409 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-nb\") pod \"c6b111da-d2fe-4bed-952d-b64183bc5442\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.203454 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-sb\") pod \"c6b111da-d2fe-4bed-952d-b64183bc5442\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.203559 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-swift-storage-0\") pod \"c6b111da-d2fe-4bed-952d-b64183bc5442\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.203765 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-svc\") pod \"c6b111da-d2fe-4bed-952d-b64183bc5442\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.203797 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-config\") pod \"c6b111da-d2fe-4bed-952d-b64183bc5442\" (UID: \"c6b111da-d2fe-4bed-952d-b64183bc5442\") " Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.223603 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6b111da-d2fe-4bed-952d-b64183bc5442-kube-api-access-5ps2z" (OuterVolumeSpecName: "kube-api-access-5ps2z") pod "c6b111da-d2fe-4bed-952d-b64183bc5442" (UID: "c6b111da-d2fe-4bed-952d-b64183bc5442"). InnerVolumeSpecName "kube-api-access-5ps2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.285828 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c6b111da-d2fe-4bed-952d-b64183bc5442" (UID: "c6b111da-d2fe-4bed-952d-b64183bc5442"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.286323 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-config" (OuterVolumeSpecName: "config") pod "c6b111da-d2fe-4bed-952d-b64183bc5442" (UID: "c6b111da-d2fe-4bed-952d-b64183bc5442"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.286818 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c6b111da-d2fe-4bed-952d-b64183bc5442" (UID: "c6b111da-d2fe-4bed-952d-b64183bc5442"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.286848 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c6b111da-d2fe-4bed-952d-b64183bc5442" (UID: "c6b111da-d2fe-4bed-952d-b64183bc5442"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.299224 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c6b111da-d2fe-4bed-952d-b64183bc5442" (UID: "c6b111da-d2fe-4bed-952d-b64183bc5442"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.306674 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.306717 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.306730 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ps2z\" (UniqueName: \"kubernetes.io/projected/c6b111da-d2fe-4bed-952d-b64183bc5442-kube-api-access-5ps2z\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.306744 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.306755 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.306766 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6b111da-d2fe-4bed-952d-b64183bc5442-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.472160 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/crc-debug-qjvkc"] Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.472476 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-qjvkc" podUID="a6c02116-6df3-432f-808f-9417c69e5531" containerName="container-00" containerID="cri-o://ae4e870ca3bceb41d51fdece0cf6a3ebd106e9085d9fecc9abd35645c76f943c" gracePeriod=2 Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.479544 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-qjvkc"] Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.810006 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:14 crc kubenswrapper[4911]: I0606 09:32:14.825760 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-59f44bc869-n7dl7" Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.122203 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.207998 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-qjvkc" Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.208186 4911 generic.go:334] "Generic (PLEG): container finished" podID="a6c02116-6df3-432f-808f-9417c69e5531" containerID="ae4e870ca3bceb41d51fdece0cf6a3ebd106e9085d9fecc9abd35645c76f943c" exitCode=0 Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.208346 4911 scope.go:117] "RemoveContainer" containerID="ae4e870ca3bceb41d51fdece0cf6a3ebd106e9085d9fecc9abd35645c76f943c" Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.212249 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647c58c75-chstb" Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.212250 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647c58c75-chstb" event={"ID":"c6b111da-d2fe-4bed-952d-b64183bc5442","Type":"ContainerDied","Data":"6482ede861fa1c80846d90576dfe91b4dfe1e226e99c710e88d88dacfc9b5c81"} Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.234225 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dvmm\" (UniqueName: \"kubernetes.io/projected/a6c02116-6df3-432f-808f-9417c69e5531-kube-api-access-2dvmm\") pod \"a6c02116-6df3-432f-808f-9417c69e5531\" (UID: \"a6c02116-6df3-432f-808f-9417c69e5531\") " Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.234424 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6c02116-6df3-432f-808f-9417c69e5531-host\") pod \"a6c02116-6df3-432f-808f-9417c69e5531\" (UID: \"a6c02116-6df3-432f-808f-9417c69e5531\") " Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.234852 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6c02116-6df3-432f-808f-9417c69e5531-host" (OuterVolumeSpecName: "host") pod "a6c02116-6df3-432f-808f-9417c69e5531" (UID: "a6c02116-6df3-432f-808f-9417c69e5531"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.248499 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a6c02116-6df3-432f-808f-9417c69e5531-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.310298 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6c02116-6df3-432f-808f-9417c69e5531-kube-api-access-2dvmm" (OuterVolumeSpecName: "kube-api-access-2dvmm") pod "a6c02116-6df3-432f-808f-9417c69e5531" (UID: "a6c02116-6df3-432f-808f-9417c69e5531"). InnerVolumeSpecName "kube-api-access-2dvmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.382427 4911 scope.go:117] "RemoveContainer" containerID="f8894f7712324d881631084632b13a2d31b558b34a3060efbb849f02ef862ada" Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.388871 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dvmm\" (UniqueName: \"kubernetes.io/projected/a6c02116-6df3-432f-808f-9417c69e5531-kube-api-access-2dvmm\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.454762 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-647c58c75-chstb"] Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.480196 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-647c58c75-chstb"] Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.489071 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.967387 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6c02116-6df3-432f-808f-9417c69e5531" path="/var/lib/kubelet/pods/a6c02116-6df3-432f-808f-9417c69e5531/volumes" Jun 06 09:32:15 crc kubenswrapper[4911]: I0606 09:32:15.968422 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6b111da-d2fe-4bed-952d-b64183bc5442" path="/var/lib/kubelet/pods/c6b111da-d2fe-4bed-952d-b64183bc5442/volumes" Jun 06 09:32:16 crc kubenswrapper[4911]: I0606 09:32:16.234802 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80e5526c-70a7-44bb-9762-23caef81912d","Type":"ContainerStarted","Data":"703c9552807275eeadb12da0272fe0a11e50f9a9e334262d3b7f6607b7eaae19"} Jun 06 09:32:16 crc kubenswrapper[4911]: I0606 09:32:16.260065 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" event={"ID":"522d16af-0c42-407e-8a26-115443d6e0fa","Type":"ContainerStarted","Data":"b7d533553b87a1be2a974258c757c73bc1f2f166ced6c21ce80c2cf802d17c37"} Jun 06 09:32:16 crc kubenswrapper[4911]: I0606 09:32:16.261709 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:16 crc kubenswrapper[4911]: I0606 09:32:16.274277 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-qjvkc" Jun 06 09:32:16 crc kubenswrapper[4911]: I0606 09:32:16.293680 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" podStartSLOduration=5.293664205 podStartE2EDuration="5.293664205s" podCreationTimestamp="2025-06-06 09:32:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:16.2899573 +0000 UTC m=+1147.565382843" watchObservedRunningTime="2025-06-06 09:32:16.293664205 +0000 UTC m=+1147.569089748" Jun 06 09:32:16 crc kubenswrapper[4911]: I0606 09:32:16.608158 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:16 crc kubenswrapper[4911]: I0606 09:32:16.694977 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:17 crc kubenswrapper[4911]: I0606 09:32:17.336199 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"31189f8d-bd76-4b64-84d2-193881b6b47a","Type":"ContainerStarted","Data":"e59874f9b16a48d6ffb8e9ec63945e74d9de29139d79c8eef773dd6a8df0c8c1"} Jun 06 09:32:17 crc kubenswrapper[4911]: I0606 09:32:17.339348 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0f5cb979-1d90-4898-a6f5-9da614504640","Type":"ContainerStarted","Data":"cca1a79fdda6d99b5881ffbf6d7c71f3d11ccd34d3aeb81626eb8c2eff0154e3"} Jun 06 09:32:17 crc kubenswrapper[4911]: I0606 09:32:17.344288 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"695d811f-7aed-428d-b224-e6711323e54b","Type":"ContainerStarted","Data":"6c3c8c1ebfedecd7827ed1c762f828423cabcc481c7c87c0e69e87f5059d675a"} Jun 06 09:32:17 crc kubenswrapper[4911]: I0606 09:32:17.344883 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="609b09fd-87cd-49be-b7ad-ddac6458a02c" containerName="manila-api-log" containerID="cri-o://aec071b040ce5eb4ac34984bbd213f9555a0a072b956e5249bd255911bdbf4bf" gracePeriod=30 Jun 06 09:32:17 crc kubenswrapper[4911]: I0606 09:32:17.345028 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="609b09fd-87cd-49be-b7ad-ddac6458a02c" containerName="manila-api" containerID="cri-o://61d172a50d60da55e4639c793bcda6ffe3f914d275c3ad39a34471e14932f447" gracePeriod=30 Jun 06 09:32:17 crc kubenswrapper[4911]: I0606 09:32:17.419687 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=7.091321437 podStartE2EDuration="9.419667792s" podCreationTimestamp="2025-06-06 09:32:08 +0000 UTC" firstStartedPulling="2025-06-06 09:32:10.383648708 +0000 UTC m=+1141.659074251" lastFinishedPulling="2025-06-06 09:32:12.711995063 +0000 UTC m=+1143.987420606" observedRunningTime="2025-06-06 09:32:17.38128927 +0000 UTC m=+1148.656714833" watchObservedRunningTime="2025-06-06 09:32:17.419667792 +0000 UTC m=+1148.695093335" Jun 06 09:32:18 crc kubenswrapper[4911]: I0606 09:32:18.409512 4911 generic.go:334] "Generic (PLEG): container finished" podID="609b09fd-87cd-49be-b7ad-ddac6458a02c" containerID="61d172a50d60da55e4639c793bcda6ffe3f914d275c3ad39a34471e14932f447" exitCode=0 Jun 06 09:32:18 crc kubenswrapper[4911]: I0606 09:32:18.410397 4911 generic.go:334] 
"Generic (PLEG): container finished" podID="609b09fd-87cd-49be-b7ad-ddac6458a02c" containerID="aec071b040ce5eb4ac34984bbd213f9555a0a072b956e5249bd255911bdbf4bf" exitCode=143 Jun 06 09:32:18 crc kubenswrapper[4911]: I0606 09:32:18.410513 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"609b09fd-87cd-49be-b7ad-ddac6458a02c","Type":"ContainerDied","Data":"61d172a50d60da55e4639c793bcda6ffe3f914d275c3ad39a34471e14932f447"} Jun 06 09:32:18 crc kubenswrapper[4911]: I0606 09:32:18.410567 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"609b09fd-87cd-49be-b7ad-ddac6458a02c","Type":"ContainerDied","Data":"aec071b040ce5eb4ac34984bbd213f9555a0a072b956e5249bd255911bdbf4bf"} Jun 06 09:32:18 crc kubenswrapper[4911]: I0606 09:32:18.412927 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9cd217ab-c7bb-4850-9cce-be28e294667c","Type":"ContainerStarted","Data":"a896e44409219f1d9c63a72a06b4503a5a4dd13efb7a3e385afec726f6c1d256"} Jun 06 09:32:18 crc kubenswrapper[4911]: I0606 09:32:18.414757 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"f60f5eac-7f8c-4909-ae10-449d6e6df432","Type":"ContainerStarted","Data":"ad6bf48e7eaa52143e9c84d5f8eeb454e4462460800a93ad0c837d41cc2675c9"} Jun 06 09:32:18 crc kubenswrapper[4911]: I0606 09:32:18.423206 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"695d811f-7aed-428d-b224-e6711323e54b","Type":"ContainerStarted","Data":"9e37cbef049348eda0dd25ecd145fd0216187016081578e8bfa7bfb621329a92"} Jun 06 09:32:18 crc kubenswrapper[4911]: I0606 09:32:18.458316 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=4.719349477 podStartE2EDuration="7.458299782s" podCreationTimestamp="2025-06-06 09:32:11 +0000 UTC" firstStartedPulling="2025-06-06 09:32:13.401724765 +0000 UTC m=+1144.677150308" lastFinishedPulling="2025-06-06 09:32:16.14067507 +0000 UTC m=+1147.416100613" observedRunningTime="2025-06-06 09:32:18.454872595 +0000 UTC m=+1149.730298148" watchObservedRunningTime="2025-06-06 09:32:18.458299782 +0000 UTC m=+1149.733725325" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.136468 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.220767 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rcdv\" (UniqueName: \"kubernetes.io/projected/609b09fd-87cd-49be-b7ad-ddac6458a02c-kube-api-access-6rcdv\") pod \"609b09fd-87cd-49be-b7ad-ddac6458a02c\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.221196 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-scripts\") pod \"609b09fd-87cd-49be-b7ad-ddac6458a02c\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.221235 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-combined-ca-bundle\") pod \"609b09fd-87cd-49be-b7ad-ddac6458a02c\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.221274 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/609b09fd-87cd-49be-b7ad-ddac6458a02c-logs\") pod \"609b09fd-87cd-49be-b7ad-ddac6458a02c\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.222118 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/609b09fd-87cd-49be-b7ad-ddac6458a02c-logs" (OuterVolumeSpecName: "logs") pod "609b09fd-87cd-49be-b7ad-ddac6458a02c" (UID: "609b09fd-87cd-49be-b7ad-ddac6458a02c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.223317 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-localtime\") pod \"609b09fd-87cd-49be-b7ad-ddac6458a02c\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.223401 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-localtime" (OuterVolumeSpecName: "etc-localtime") pod "609b09fd-87cd-49be-b7ad-ddac6458a02c" (UID: "609b09fd-87cd-49be-b7ad-ddac6458a02c"). InnerVolumeSpecName "etc-localtime". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.223479 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data-custom\") pod \"609b09fd-87cd-49be-b7ad-ddac6458a02c\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.223514 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data\") pod \"609b09fd-87cd-49be-b7ad-ddac6458a02c\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.223613 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-machine-id\") pod \"609b09fd-87cd-49be-b7ad-ddac6458a02c\" (UID: \"609b09fd-87cd-49be-b7ad-ddac6458a02c\") " Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.224044 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "609b09fd-87cd-49be-b7ad-ddac6458a02c" (UID: "609b09fd-87cd-49be-b7ad-ddac6458a02c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.224665 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/609b09fd-87cd-49be-b7ad-ddac6458a02c-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.224690 4911 reconciler_common.go:293] "Volume detached for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-localtime\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.224704 4911 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/609b09fd-87cd-49be-b7ad-ddac6458a02c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.238183 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.244791 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-scripts" (OuterVolumeSpecName: "scripts") pod "609b09fd-87cd-49be-b7ad-ddac6458a02c" (UID: "609b09fd-87cd-49be-b7ad-ddac6458a02c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.253472 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/609b09fd-87cd-49be-b7ad-ddac6458a02c-kube-api-access-6rcdv" (OuterVolumeSpecName: "kube-api-access-6rcdv") pod "609b09fd-87cd-49be-b7ad-ddac6458a02c" (UID: "609b09fd-87cd-49be-b7ad-ddac6458a02c"). InnerVolumeSpecName "kube-api-access-6rcdv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.264288 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "609b09fd-87cd-49be-b7ad-ddac6458a02c" (UID: "609b09fd-87cd-49be-b7ad-ddac6458a02c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.326349 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rcdv\" (UniqueName: \"kubernetes.io/projected/609b09fd-87cd-49be-b7ad-ddac6458a02c-kube-api-access-6rcdv\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.326397 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.326406 4911 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data-custom\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.382650 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.511051 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "609b09fd-87cd-49be-b7ad-ddac6458a02c" (UID: "609b09fd-87cd-49be-b7ad-ddac6458a02c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.511326 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"609b09fd-87cd-49be-b7ad-ddac6458a02c","Type":"ContainerDied","Data":"3c261120b57fef843ea8e3bd708e3b3adf369f8ff1d4c797c07ffb06ee1949ca"} Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.511394 4911 scope.go:117] "RemoveContainer" containerID="61d172a50d60da55e4639c793bcda6ffe3f914d275c3ad39a34471e14932f447" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.511464 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.527649 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80e5526c-70a7-44bb-9762-23caef81912d","Type":"ContainerStarted","Data":"f8b08ac8e651d30cd7517db26c90fe1282c1a6e1ce20ffcfa24f782b354f4a51"} Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.527889 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="ceilometer-central-agent" containerID="cri-o://48956eedc37ed9b718ba75a7fe9ac3bdaf030ebff4bac6f41e11e743071d3ba6" gracePeriod=30 Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.528130 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.528077 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="sg-core" containerID="cri-o://703c9552807275eeadb12da0272fe0a11e50f9a9e334262d3b7f6607b7eaae19" gracePeriod=30 Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.528217 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="proxy-httpd" containerID="cri-o://f8b08ac8e651d30cd7517db26c90fe1282c1a6e1ce20ffcfa24f782b354f4a51" gracePeriod=30 Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.528326 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="ceilometer-notification-agent" containerID="cri-o://f22ac509253f2fc580f47a38b62dfa390bddb856f17dce57068a38f8f30ac1fa" gracePeriod=30 Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.533437 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data" (OuterVolumeSpecName: "config-data") pod "609b09fd-87cd-49be-b7ad-ddac6458a02c" (UID: "609b09fd-87cd-49be-b7ad-ddac6458a02c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.539553 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"f60f5eac-7f8c-4909-ae10-449d6e6df432","Type":"ContainerStarted","Data":"ae93c1123ff5cc08f03fe458427bdc45141e8e2c0e6ce6afd19f8efd56eea572"} Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.543523 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.543567 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/609b09fd-87cd-49be-b7ad-ddac6458a02c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.547605 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0f5cb979-1d90-4898-a6f5-9da614504640","Type":"ContainerStarted","Data":"0339b6cd6b72ae4b92773d75e9df42a3cbd647483046b48db94d52799677d829"} Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.548031 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="0f5cb979-1d90-4898-a6f5-9da614504640" containerName="cinder-api-log" containerID="cri-o://cca1a79fdda6d99b5881ffbf6d7c71f3d11ccd34d3aeb81626eb8c2eff0154e3" gracePeriod=30 Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.548191 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="0f5cb979-1d90-4898-a6f5-9da614504640" containerName="cinder-api" containerID="cri-o://0339b6cd6b72ae4b92773d75e9df42a3cbd647483046b48db94d52799677d829" gracePeriod=30 Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.578261 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.5250340829999995 podStartE2EDuration="15.578238014s" podCreationTimestamp="2025-06-06 09:32:04 +0000 UTC" firstStartedPulling="2025-06-06 09:32:06.996554516 +0000 UTC m=+1138.271980059" lastFinishedPulling="2025-06-06 09:32:18.049758447 +0000 UTC m=+1149.325183990" observedRunningTime="2025-06-06 09:32:19.570743232 +0000 UTC m=+1150.846168775" watchObservedRunningTime="2025-06-06 09:32:19.578238014 +0000 UTC m=+1150.853663567" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.585302 4911 scope.go:117] "RemoveContainer" containerID="aec071b040ce5eb4ac34984bbd213f9555a0a072b956e5249bd255911bdbf4bf" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.643486 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=4.773130773 podStartE2EDuration="8.643458993s" podCreationTimestamp="2025-06-06 09:32:11 +0000 UTC" firstStartedPulling="2025-06-06 09:32:13.052371204 +0000 UTC m=+1144.327796747" lastFinishedPulling="2025-06-06 09:32:16.922699424 +0000 UTC m=+1148.198124967" observedRunningTime="2025-06-06 09:32:19.634504674 +0000 UTC m=+1150.909930247" watchObservedRunningTime="2025-06-06 09:32:19.643458993 +0000 UTC m=+1150.918884536" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.706738 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=8.706710022 podStartE2EDuration="8.706710022s" podCreationTimestamp="2025-06-06 
09:32:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:19.692298253 +0000 UTC m=+1150.967723796" watchObservedRunningTime="2025-06-06 09:32:19.706710022 +0000 UTC m=+1150.982135565" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.901255 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.999480 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-api-0"] Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.999522 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Jun 06 09:32:19 crc kubenswrapper[4911]: E0606 09:32:19.999805 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="609b09fd-87cd-49be-b7ad-ddac6458a02c" containerName="manila-api" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.999818 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="609b09fd-87cd-49be-b7ad-ddac6458a02c" containerName="manila-api" Jun 06 09:32:19 crc kubenswrapper[4911]: E0606 09:32:19.999831 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="609b09fd-87cd-49be-b7ad-ddac6458a02c" containerName="manila-api-log" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.999838 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="609b09fd-87cd-49be-b7ad-ddac6458a02c" containerName="manila-api-log" Jun 06 09:32:19 crc kubenswrapper[4911]: E0606 09:32:19.999849 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6c02116-6df3-432f-808f-9417c69e5531" containerName="container-00" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.999855 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6c02116-6df3-432f-808f-9417c69e5531" containerName="container-00" Jun 06 09:32:19 crc kubenswrapper[4911]: E0606 09:32:19.999884 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6b111da-d2fe-4bed-952d-b64183bc5442" containerName="init" Jun 06 09:32:19 crc kubenswrapper[4911]: I0606 09:32:19.999891 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6b111da-d2fe-4bed-952d-b64183bc5442" containerName="init" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.000083 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6b111da-d2fe-4bed-952d-b64183bc5442" containerName="init" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.000124 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="609b09fd-87cd-49be-b7ad-ddac6458a02c" containerName="manila-api" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.000146 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="609b09fd-87cd-49be-b7ad-ddac6458a02c" containerName="manila-api-log" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.000159 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6c02116-6df3-432f-808f-9417c69e5531" containerName="container-00" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.001068 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.001162 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.007249 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-internal-svc" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.007483 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.007641 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-public-svc" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.169475 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-config-data\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.169552 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.169579 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlfsg\" (UniqueName: \"kubernetes.io/projected/5f46a595-e6b7-4e07-a817-8e37db292ace-kube-api-access-jlfsg\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.169606 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f46a595-e6b7-4e07-a817-8e37db292ace-logs\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.169704 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-internal-tls-certs\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.169747 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-config-data-custom\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.170102 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-scripts\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.170159 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/5f46a595-e6b7-4e07-a817-8e37db292ace-etc-localtime\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 
09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.170226 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f46a595-e6b7-4e07-a817-8e37db292ace-etc-machine-id\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.170310 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-public-tls-certs\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.263680 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54cd458d76-j6txj" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.273781 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-public-tls-certs\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.273964 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-config-data\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.274017 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.274049 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlfsg\" (UniqueName: \"kubernetes.io/projected/5f46a595-e6b7-4e07-a817-8e37db292ace-kube-api-access-jlfsg\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.274077 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f46a595-e6b7-4e07-a817-8e37db292ace-logs\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.274176 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-internal-tls-certs\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.274208 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-config-data-custom\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.274277 4911 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-scripts\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.274297 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/5f46a595-e6b7-4e07-a817-8e37db292ace-etc-localtime\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.274331 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f46a595-e6b7-4e07-a817-8e37db292ace-etc-machine-id\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.274433 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5f46a595-e6b7-4e07-a817-8e37db292ace-etc-machine-id\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.277848 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/5f46a595-e6b7-4e07-a817-8e37db292ace-etc-localtime\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.279589 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f46a595-e6b7-4e07-a817-8e37db292ace-logs\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.281783 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-scripts\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.284302 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-public-tls-certs\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.285573 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-config-data-custom\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.285694 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.303506 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-config-data\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.304981 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f46a595-e6b7-4e07-a817-8e37db292ace-internal-tls-certs\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.318970 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlfsg\" (UniqueName: \"kubernetes.io/projected/5f46a595-e6b7-4e07-a817-8e37db292ace-kube-api-access-jlfsg\") pod \"manila-api-0\" (UID: \"5f46a595-e6b7-4e07-a817-8e37db292ace\") " pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.327842 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.372060 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-58b6f8f48b-4n9zh"] Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.373741 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-58b6f8f48b-4n9zh" podUID="de868b44-b17e-418a-9194-58d24fc9af42" containerName="barbican-api-log" containerID="cri-o://d56122564610d000fecbf3e19a2ad90f1d89a0bd6a45975fc3142fd705e352c9" gracePeriod=30 Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.374254 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-58b6f8f48b-4n9zh" podUID="de868b44-b17e-418a-9194-58d24fc9af42" containerName="barbican-api" containerID="cri-o://e5b37d525ddcec754daf792e9f0afaaeb17fe744728feffb749677d43f2d9609" gracePeriod=30 Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.597319 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9cd217ab-c7bb-4850-9cce-be28e294667c","Type":"ContainerStarted","Data":"05e24bed45229dcc5ed1d86b595cd98de482685be1964564bd2063e3ded505f4"} Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.614572 4911 generic.go:334] "Generic (PLEG): container finished" podID="80e5526c-70a7-44bb-9762-23caef81912d" containerID="703c9552807275eeadb12da0272fe0a11e50f9a9e334262d3b7f6607b7eaae19" exitCode=2 Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.614881 4911 generic.go:334] "Generic (PLEG): container finished" podID="80e5526c-70a7-44bb-9762-23caef81912d" containerID="f22ac509253f2fc580f47a38b62dfa390bddb856f17dce57068a38f8f30ac1fa" exitCode=0 Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.614933 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80e5526c-70a7-44bb-9762-23caef81912d","Type":"ContainerDied","Data":"703c9552807275eeadb12da0272fe0a11e50f9a9e334262d3b7f6607b7eaae19"} Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.614964 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80e5526c-70a7-44bb-9762-23caef81912d","Type":"ContainerDied","Data":"f22ac509253f2fc580f47a38b62dfa390bddb856f17dce57068a38f8f30ac1fa"} Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.630703 4911 generic.go:334] "Generic (PLEG): container finished" podID="0f5cb979-1d90-4898-a6f5-9da614504640" 
containerID="0339b6cd6b72ae4b92773d75e9df42a3cbd647483046b48db94d52799677d829" exitCode=0 Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.630788 4911 generic.go:334] "Generic (PLEG): container finished" podID="0f5cb979-1d90-4898-a6f5-9da614504640" containerID="cca1a79fdda6d99b5881ffbf6d7c71f3d11ccd34d3aeb81626eb8c2eff0154e3" exitCode=143 Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.630876 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0f5cb979-1d90-4898-a6f5-9da614504640","Type":"ContainerDied","Data":"0339b6cd6b72ae4b92773d75e9df42a3cbd647483046b48db94d52799677d829"} Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.630910 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0f5cb979-1d90-4898-a6f5-9da614504640","Type":"ContainerDied","Data":"cca1a79fdda6d99b5881ffbf6d7c71f3d11ccd34d3aeb81626eb8c2eff0154e3"} Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.663184 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=6.729302966 podStartE2EDuration="9.663157679s" podCreationTimestamp="2025-06-06 09:32:11 +0000 UTC" firstStartedPulling="2025-06-06 09:32:12.3422182 +0000 UTC m=+1143.617643733" lastFinishedPulling="2025-06-06 09:32:15.276072903 +0000 UTC m=+1146.551498446" observedRunningTime="2025-06-06 09:32:20.62449442 +0000 UTC m=+1151.899919973" watchObservedRunningTime="2025-06-06 09:32:20.663157679 +0000 UTC m=+1151.938583232" Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.681431 4911 generic.go:334] "Generic (PLEG): container finished" podID="de868b44-b17e-418a-9194-58d24fc9af42" containerID="d56122564610d000fecbf3e19a2ad90f1d89a0bd6a45975fc3142fd705e352c9" exitCode=143 Jun 06 09:32:20 crc kubenswrapper[4911]: I0606 09:32:20.681754 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b6f8f48b-4n9zh" event={"ID":"de868b44-b17e-418a-9194-58d24fc9af42","Type":"ContainerDied","Data":"d56122564610d000fecbf3e19a2ad90f1d89a0bd6a45975fc3142fd705e352c9"} Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.300376 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.317024 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.438551 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data\") pod \"0f5cb979-1d90-4898-a6f5-9da614504640\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.438695 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rs8r\" (UniqueName: \"kubernetes.io/projected/0f5cb979-1d90-4898-a6f5-9da614504640-kube-api-access-9rs8r\") pod \"0f5cb979-1d90-4898-a6f5-9da614504640\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.438987 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f5cb979-1d90-4898-a6f5-9da614504640-logs\") pod \"0f5cb979-1d90-4898-a6f5-9da614504640\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.439072 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-localtime\") pod \"0f5cb979-1d90-4898-a6f5-9da614504640\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.439150 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data-custom\") pod \"0f5cb979-1d90-4898-a6f5-9da614504640\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.439252 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-scripts\") pod \"0f5cb979-1d90-4898-a6f5-9da614504640\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.439305 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-machine-id\") pod \"0f5cb979-1d90-4898-a6f5-9da614504640\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.439330 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-combined-ca-bundle\") pod \"0f5cb979-1d90-4898-a6f5-9da614504640\" (UID: \"0f5cb979-1d90-4898-a6f5-9da614504640\") " Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.447358 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f5cb979-1d90-4898-a6f5-9da614504640-kube-api-access-9rs8r" (OuterVolumeSpecName: "kube-api-access-9rs8r") pod "0f5cb979-1d90-4898-a6f5-9da614504640" (UID: "0f5cb979-1d90-4898-a6f5-9da614504640"). InnerVolumeSpecName "kube-api-access-9rs8r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.447793 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f5cb979-1d90-4898-a6f5-9da614504640-logs" (OuterVolumeSpecName: "logs") pod "0f5cb979-1d90-4898-a6f5-9da614504640" (UID: "0f5cb979-1d90-4898-a6f5-9da614504640"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.449202 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0f5cb979-1d90-4898-a6f5-9da614504640" (UID: "0f5cb979-1d90-4898-a6f5-9da614504640"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.449285 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-localtime" (OuterVolumeSpecName: "etc-localtime") pod "0f5cb979-1d90-4898-a6f5-9da614504640" (UID: "0f5cb979-1d90-4898-a6f5-9da614504640"). InnerVolumeSpecName "etc-localtime". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.453296 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0f5cb979-1d90-4898-a6f5-9da614504640" (UID: "0f5cb979-1d90-4898-a6f5-9da614504640"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.456006 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-scripts" (OuterVolumeSpecName: "scripts") pod "0f5cb979-1d90-4898-a6f5-9da614504640" (UID: "0f5cb979-1d90-4898-a6f5-9da614504640"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.475215 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f5cb979-1d90-4898-a6f5-9da614504640" (UID: "0f5cb979-1d90-4898-a6f5-9da614504640"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.525483 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data" (OuterVolumeSpecName: "config-data") pod "0f5cb979-1d90-4898-a6f5-9da614504640" (UID: "0f5cb979-1d90-4898-a6f5-9da614504640"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.550695 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f5cb979-1d90-4898-a6f5-9da614504640-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.550745 4911 reconciler_common.go:293] "Volume detached for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-localtime\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.550759 4911 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data-custom\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.550769 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.550780 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.550792 4911 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0f5cb979-1d90-4898-a6f5-9da614504640-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.550802 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f5cb979-1d90-4898-a6f5-9da614504640-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.550812 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rs8r\" (UniqueName: \"kubernetes.io/projected/0f5cb979-1d90-4898-a6f5-9da614504640-kube-api-access-9rs8r\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.657404 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.710224 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"5f46a595-e6b7-4e07-a817-8e37db292ace","Type":"ContainerStarted","Data":"20c365e58227936c732e2c342262946d47bb2bad17b03da6fcd4fbad34b518d9"} Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.730366 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.743011 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0f5cb979-1d90-4898-a6f5-9da614504640","Type":"ContainerDied","Data":"00f579f18602d84a5ed0911646099d41cfb08c1e2a2d8b7aeb82d961b0490d46"} Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.743053 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.743124 4911 scope.go:117] "RemoveContainer" containerID="0339b6cd6b72ae4b92773d75e9df42a3cbd647483046b48db94d52799677d829" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.757884 4911 generic.go:334] "Generic (PLEG): container finished" podID="80e5526c-70a7-44bb-9762-23caef81912d" containerID="f8b08ac8e651d30cd7517db26c90fe1282c1a6e1ce20ffcfa24f782b354f4a51" exitCode=0 Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.758821 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80e5526c-70a7-44bb-9762-23caef81912d","Type":"ContainerDied","Data":"f8b08ac8e651d30cd7517db26c90fe1282c1a6e1ce20ffcfa24f782b354f4a51"} Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.810193 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.846670 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.864176 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57f4d4886c-m5nx6"] Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.864426 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" podUID="67d3c09a-3215-4dfe-8838-d621e317b13e" containerName="dnsmasq-dns" containerID="cri-o://b039da457a9b87cf180af68425b552d73129d54aa620a0a9160a62b776bc8715" gracePeriod=10 Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.865010 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.871386 4911 scope.go:117] "RemoveContainer" containerID="cca1a79fdda6d99b5881ffbf6d7c71f3d11ccd34d3aeb81626eb8c2eff0154e3" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.894300 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jun 06 09:32:21 crc kubenswrapper[4911]: E0606 09:32:21.894867 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f5cb979-1d90-4898-a6f5-9da614504640" containerName="cinder-api-log" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.894883 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f5cb979-1d90-4898-a6f5-9da614504640" containerName="cinder-api-log" Jun 06 09:32:21 crc kubenswrapper[4911]: E0606 09:32:21.894895 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f5cb979-1d90-4898-a6f5-9da614504640" containerName="cinder-api" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.894902 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f5cb979-1d90-4898-a6f5-9da614504640" containerName="cinder-api" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.895070 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f5cb979-1d90-4898-a6f5-9da614504640" containerName="cinder-api-log" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.898080 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f5cb979-1d90-4898-a6f5-9da614504640" containerName="cinder-api" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.899238 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.906217 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.906348 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.906391 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.906472 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.961459 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.961519 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-config-data-custom\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.961539 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.963453 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-etc-localtime\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.963559 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.963602 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f5cb979-1d90-4898-a6f5-9da614504640" path="/var/lib/kubelet/pods/0f5cb979-1d90-4898-a6f5-9da614504640/volumes" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.963618 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tz9m\" (UniqueName: \"kubernetes.io/projected/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-kube-api-access-8tz9m\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.964338 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-logs\") pod \"cinder-api-0\" (UID: 
\"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.964467 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.964558 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="609b09fd-87cd-49be-b7ad-ddac6458a02c" path="/var/lib/kubelet/pods/609b09fd-87cd-49be-b7ad-ddac6458a02c/volumes" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.964578 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-scripts\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:21 crc kubenswrapper[4911]: I0606 09:32:21.964617 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-config-data\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.067412 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tz9m\" (UniqueName: \"kubernetes.io/projected/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-kube-api-access-8tz9m\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.067490 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-logs\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.067555 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.067618 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-scripts\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.067656 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-config-data\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.067745 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 
09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.067799 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-config-data-custom\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.067822 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.067881 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-etc-localtime\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.067963 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.069065 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-logs\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.069165 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.072932 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-etc-localtime\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.080744 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.082923 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.083638 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-config-data\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 
09:32:22.091562 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.092254 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-config-data-custom\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.094648 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-scripts\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.097463 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tz9m\" (UniqueName: \"kubernetes.io/projected/ceed9593-a84f-422c-9eb1-b5ab24bbb3b6-kube-api-access-8tz9m\") pod \"cinder-api-0\" (UID: \"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6\") " pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.120688 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.323606 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.521431 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-volume-volume1-0" podUID="695d811f-7aed-428d-b224-e6711323e54b" containerName="cinder-volume" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.768999 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"5f46a595-e6b7-4e07-a817-8e37db292ace","Type":"ContainerStarted","Data":"cbc47cccbfa2c1c121ea765d4896b7a5223d3bb98d3b6d8b634f77095529a9a5"} Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.786110 4911 generic.go:334] "Generic (PLEG): container finished" podID="67d3c09a-3215-4dfe-8838-d621e317b13e" containerID="b039da457a9b87cf180af68425b552d73129d54aa620a0a9160a62b776bc8715" exitCode=0 Jun 06 09:32:22 crc kubenswrapper[4911]: I0606 09:32:22.786124 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" event={"ID":"67d3c09a-3215-4dfe-8838-d621e317b13e","Type":"ContainerDied","Data":"b039da457a9b87cf180af68425b552d73129d54aa620a0a9160a62b776bc8715"} Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.035456 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.036958 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.093572 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-sb\") pod \"67d3c09a-3215-4dfe-8838-d621e317b13e\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.093630 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-swift-storage-0\") pod \"67d3c09a-3215-4dfe-8838-d621e317b13e\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.093703 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-svc\") pod \"67d3c09a-3215-4dfe-8838-d621e317b13e\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.093895 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p66vx\" (UniqueName: \"kubernetes.io/projected/67d3c09a-3215-4dfe-8838-d621e317b13e-kube-api-access-p66vx\") pod \"67d3c09a-3215-4dfe-8838-d621e317b13e\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.093934 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-nb\") pod \"67d3c09a-3215-4dfe-8838-d621e317b13e\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.093959 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-config\") pod \"67d3c09a-3215-4dfe-8838-d621e317b13e\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.182481 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67d3c09a-3215-4dfe-8838-d621e317b13e-kube-api-access-p66vx" (OuterVolumeSpecName: "kube-api-access-p66vx") pod "67d3c09a-3215-4dfe-8838-d621e317b13e" (UID: "67d3c09a-3215-4dfe-8838-d621e317b13e"). InnerVolumeSpecName "kube-api-access-p66vx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.197169 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p66vx\" (UniqueName: \"kubernetes.io/projected/67d3c09a-3215-4dfe-8838-d621e317b13e-kube-api-access-p66vx\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.222275 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "67d3c09a-3215-4dfe-8838-d621e317b13e" (UID: "67d3c09a-3215-4dfe-8838-d621e317b13e"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.247665 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "67d3c09a-3215-4dfe-8838-d621e317b13e" (UID: "67d3c09a-3215-4dfe-8838-d621e317b13e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.257281 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-config" (OuterVolumeSpecName: "config") pod "67d3c09a-3215-4dfe-8838-d621e317b13e" (UID: "67d3c09a-3215-4dfe-8838-d621e317b13e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:23 crc kubenswrapper[4911]: E0606 09:32:23.261463 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-svc podName:67d3c09a-3215-4dfe-8838-d621e317b13e nodeName:}" failed. No retries permitted until 2025-06-06 09:32:23.761429241 +0000 UTC m=+1155.036854784 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-svc") pod "67d3c09a-3215-4dfe-8838-d621e317b13e" (UID: "67d3c09a-3215-4dfe-8838-d621e317b13e") : error deleting /var/lib/kubelet/pods/67d3c09a-3215-4dfe-8838-d621e317b13e/volume-subpaths: remove /var/lib/kubelet/pods/67d3c09a-3215-4dfe-8838-d621e317b13e/volume-subpaths: no such file or directory Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.261619 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "67d3c09a-3215-4dfe-8838-d621e317b13e" (UID: "67d3c09a-3215-4dfe-8838-d621e317b13e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.298758 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.298791 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.298808 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.298818 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.589457 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-58b6f8f48b-4n9zh" podUID="de868b44-b17e-418a-9194-58d24fc9af42" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:52506->10.217.0.159:9311: read: connection reset by peer" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.589458 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-58b6f8f48b-4n9zh" podUID="de868b44-b17e-418a-9194-58d24fc9af42" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:52508->10.217.0.159:9311: read: connection reset by peer" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.806976 4911 generic.go:334] "Generic (PLEG): container finished" podID="de868b44-b17e-418a-9194-58d24fc9af42" containerID="e5b37d525ddcec754daf792e9f0afaaeb17fe744728feffb749677d43f2d9609" exitCode=0 Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.807063 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b6f8f48b-4n9zh" event={"ID":"de868b44-b17e-418a-9194-58d24fc9af42","Type":"ContainerDied","Data":"e5b37d525ddcec754daf792e9f0afaaeb17fe744728feffb749677d43f2d9609"} Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.807807 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-svc\") pod \"67d3c09a-3215-4dfe-8838-d621e317b13e\" (UID: \"67d3c09a-3215-4dfe-8838-d621e317b13e\") " Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.808426 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "67d3c09a-3215-4dfe-8838-d621e317b13e" (UID: "67d3c09a-3215-4dfe-8838-d621e317b13e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.808602 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67d3c09a-3215-4dfe-8838-d621e317b13e-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.810884 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6","Type":"ContainerStarted","Data":"3a57746487feb2a90a05f6ce6ac63207aefe2aa2c7399afda9569bb733a8c6b5"} Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.819149 4911 generic.go:334] "Generic (PLEG): container finished" podID="80e5526c-70a7-44bb-9762-23caef81912d" containerID="48956eedc37ed9b718ba75a7fe9ac3bdaf030ebff4bac6f41e11e743071d3ba6" exitCode=0 Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.819197 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80e5526c-70a7-44bb-9762-23caef81912d","Type":"ContainerDied","Data":"48956eedc37ed9b718ba75a7fe9ac3bdaf030ebff4bac6f41e11e743071d3ba6"} Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.821906 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"5f46a595-e6b7-4e07-a817-8e37db292ace","Type":"ContainerStarted","Data":"14f465da041af3a779af609bbda918d0982178afb4922db90b53a9b5e7f452dd"} Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.822068 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.829112 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" event={"ID":"67d3c09a-3215-4dfe-8838-d621e317b13e","Type":"ContainerDied","Data":"60487bd756fdd169539f538e9262d508d140656d7c31b4f50812367817b72223"} Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.829176 4911 scope.go:117] "RemoveContainer" containerID="b039da457a9b87cf180af68425b552d73129d54aa620a0a9160a62b776bc8715" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.829196 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:32:23 crc kubenswrapper[4911]: I0606 09:32:23.958128 4911 scope.go:117] "RemoveContainer" containerID="371f5d50f3e542ba446d10af4a092f6dfd875dde75b4b947f2789f080cdd24da" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.021404 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.052787 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=5.052764253 podStartE2EDuration="5.052764253s" podCreationTimestamp="2025-06-06 09:32:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:23.846664898 +0000 UTC m=+1155.122090451" watchObservedRunningTime="2025-06-06 09:32:24.052764253 +0000 UTC m=+1155.328189806" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.210457 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.215545 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-run-httpd\") pod \"80e5526c-70a7-44bb-9762-23caef81912d\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.215628 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-config-data\") pod \"80e5526c-70a7-44bb-9762-23caef81912d\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.215661 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-log-httpd\") pod \"80e5526c-70a7-44bb-9762-23caef81912d\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.215804 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-scripts\") pod \"80e5526c-70a7-44bb-9762-23caef81912d\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.215836 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kww9p\" (UniqueName: \"kubernetes.io/projected/80e5526c-70a7-44bb-9762-23caef81912d-kube-api-access-kww9p\") pod \"80e5526c-70a7-44bb-9762-23caef81912d\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.215920 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-combined-ca-bundle\") pod \"80e5526c-70a7-44bb-9762-23caef81912d\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.216208 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-sg-core-conf-yaml\") pod \"80e5526c-70a7-44bb-9762-23caef81912d\" (UID: \"80e5526c-70a7-44bb-9762-23caef81912d\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.216313 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "80e5526c-70a7-44bb-9762-23caef81912d" (UID: "80e5526c-70a7-44bb-9762-23caef81912d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.216552 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "80e5526c-70a7-44bb-9762-23caef81912d" (UID: "80e5526c-70a7-44bb-9762-23caef81912d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.217741 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-run-httpd\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.217803 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/80e5526c-70a7-44bb-9762-23caef81912d-log-httpd\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.222949 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80e5526c-70a7-44bb-9762-23caef81912d-kube-api-access-kww9p" (OuterVolumeSpecName: "kube-api-access-kww9p") pod "80e5526c-70a7-44bb-9762-23caef81912d" (UID: "80e5526c-70a7-44bb-9762-23caef81912d"). InnerVolumeSpecName "kube-api-access-kww9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.223479 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-scripts" (OuterVolumeSpecName: "scripts") pod "80e5526c-70a7-44bb-9762-23caef81912d" (UID: "80e5526c-70a7-44bb-9762-23caef81912d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.254722 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "80e5526c-70a7-44bb-9762-23caef81912d" (UID: "80e5526c-70a7-44bb-9762-23caef81912d"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.300026 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.300119 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.300180 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.301033 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6b9847c4a123626a7be96b480b8b31ed0796d77df359c9b4543cc2db6085b4ee"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.301124 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://6b9847c4a123626a7be96b480b8b31ed0796d77df359c9b4543cc2db6085b4ee" gracePeriod=600 Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.308707 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80e5526c-70a7-44bb-9762-23caef81912d" (UID: "80e5526c-70a7-44bb-9762-23caef81912d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.318892 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltrwb\" (UniqueName: \"kubernetes.io/projected/de868b44-b17e-418a-9194-58d24fc9af42-kube-api-access-ltrwb\") pod \"de868b44-b17e-418a-9194-58d24fc9af42\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.319043 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de868b44-b17e-418a-9194-58d24fc9af42-logs\") pod \"de868b44-b17e-418a-9194-58d24fc9af42\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.319172 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-combined-ca-bundle\") pod \"de868b44-b17e-418a-9194-58d24fc9af42\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.319487 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data-custom\") pod \"de868b44-b17e-418a-9194-58d24fc9af42\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.319556 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data\") pod \"de868b44-b17e-418a-9194-58d24fc9af42\" (UID: \"de868b44-b17e-418a-9194-58d24fc9af42\") " Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.319633 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de868b44-b17e-418a-9194-58d24fc9af42-logs" (OuterVolumeSpecName: "logs") pod "de868b44-b17e-418a-9194-58d24fc9af42" (UID: "de868b44-b17e-418a-9194-58d24fc9af42"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.320645 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.320670 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de868b44-b17e-418a-9194-58d24fc9af42-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.320682 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.320691 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.320700 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kww9p\" (UniqueName: \"kubernetes.io/projected/80e5526c-70a7-44bb-9762-23caef81912d-kube-api-access-kww9p\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.324300 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "de868b44-b17e-418a-9194-58d24fc9af42" (UID: "de868b44-b17e-418a-9194-58d24fc9af42"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.324330 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de868b44-b17e-418a-9194-58d24fc9af42-kube-api-access-ltrwb" (OuterVolumeSpecName: "kube-api-access-ltrwb") pod "de868b44-b17e-418a-9194-58d24fc9af42" (UID: "de868b44-b17e-418a-9194-58d24fc9af42"). InnerVolumeSpecName "kube-api-access-ltrwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.342340 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-config-data" (OuterVolumeSpecName: "config-data") pod "80e5526c-70a7-44bb-9762-23caef81912d" (UID: "80e5526c-70a7-44bb-9762-23caef81912d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.366452 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de868b44-b17e-418a-9194-58d24fc9af42" (UID: "de868b44-b17e-418a-9194-58d24fc9af42"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.392760 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data" (OuterVolumeSpecName: "config-data") pod "de868b44-b17e-418a-9194-58d24fc9af42" (UID: "de868b44-b17e-418a-9194-58d24fc9af42"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.422481 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.422514 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80e5526c-70a7-44bb-9762-23caef81912d-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.422523 4911 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data-custom\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.422531 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de868b44-b17e-418a-9194-58d24fc9af42-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.422541 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltrwb\" (UniqueName: \"kubernetes.io/projected/de868b44-b17e-418a-9194-58d24fc9af42-kube-api-access-ltrwb\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.844521 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"80e5526c-70a7-44bb-9762-23caef81912d","Type":"ContainerDied","Data":"1949c9dee80204695cb600c725efc2e743413036dfe5fb9970cef42ddf60ee18"} Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.844559 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.844934 4911 scope.go:117] "RemoveContainer" containerID="f8b08ac8e651d30cd7517db26c90fe1282c1a6e1ce20ffcfa24f782b354f4a51" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.851887 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="6b9847c4a123626a7be96b480b8b31ed0796d77df359c9b4543cc2db6085b4ee" exitCode=0 Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.851962 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"6b9847c4a123626a7be96b480b8b31ed0796d77df359c9b4543cc2db6085b4ee"} Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.851992 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"b85be7d4abe7e6126686c6349bcc9d33572e190ba6f10a48055108480e2a3749"} Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.860518 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-58b6f8f48b-4n9zh" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.860518 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-58b6f8f48b-4n9zh" event={"ID":"de868b44-b17e-418a-9194-58d24fc9af42","Type":"ContainerDied","Data":"644a9678f18c567cab1d8fe4ef4195edcd0fdc46ba518524a219bde3a5289645"} Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.863447 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6","Type":"ContainerStarted","Data":"745c427f3c908d9abcdc8c68c45b7093118146e6a7f363959457b695b9d4815c"} Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.863514 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ceed9593-a84f-422c-9eb1-b5ab24bbb3b6","Type":"ContainerStarted","Data":"1bd2caf9f53efe16c4afd19fa5e0c5a3ec9832b1e6412574b5e9a2497c91b6f8"} Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.905529 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.915838 4911 scope.go:117] "RemoveContainer" containerID="703c9552807275eeadb12da0272fe0a11e50f9a9e334262d3b7f6607b7eaae19" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.923495 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.939506 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:24 crc kubenswrapper[4911]: E0606 09:32:24.939946 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67d3c09a-3215-4dfe-8838-d621e317b13e" containerName="init" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.939967 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="67d3c09a-3215-4dfe-8838-d621e317b13e" containerName="init" Jun 06 09:32:24 crc kubenswrapper[4911]: E0606 09:32:24.939982 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="ceilometer-central-agent" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.939998 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="ceilometer-central-agent" Jun 06 09:32:24 crc kubenswrapper[4911]: E0606 09:32:24.940012 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="sg-core" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940018 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="sg-core" Jun 06 09:32:24 crc kubenswrapper[4911]: E0606 09:32:24.940033 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="proxy-httpd" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940039 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="proxy-httpd" Jun 06 09:32:24 crc kubenswrapper[4911]: E0606 09:32:24.940053 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de868b44-b17e-418a-9194-58d24fc9af42" containerName="barbican-api" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940059 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="de868b44-b17e-418a-9194-58d24fc9af42" containerName="barbican-api" 
Jun 06 09:32:24 crc kubenswrapper[4911]: E0606 09:32:24.940068 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="ceilometer-notification-agent" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940073 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="ceilometer-notification-agent" Jun 06 09:32:24 crc kubenswrapper[4911]: E0606 09:32:24.940086 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67d3c09a-3215-4dfe-8838-d621e317b13e" containerName="dnsmasq-dns" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940107 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="67d3c09a-3215-4dfe-8838-d621e317b13e" containerName="dnsmasq-dns" Jun 06 09:32:24 crc kubenswrapper[4911]: E0606 09:32:24.940127 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de868b44-b17e-418a-9194-58d24fc9af42" containerName="barbican-api-log" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940132 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="de868b44-b17e-418a-9194-58d24fc9af42" containerName="barbican-api-log" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940323 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="67d3c09a-3215-4dfe-8838-d621e317b13e" containerName="dnsmasq-dns" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940343 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="de868b44-b17e-418a-9194-58d24fc9af42" containerName="barbican-api" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940363 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="ceilometer-notification-agent" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940382 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="de868b44-b17e-418a-9194-58d24fc9af42" containerName="barbican-api-log" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940397 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="proxy-httpd" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940413 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="sg-core" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.940434 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="80e5526c-70a7-44bb-9762-23caef81912d" containerName="ceilometer-central-agent" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.941649 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.941625549 podStartE2EDuration="3.941625549s" podCreationTimestamp="2025-06-06 09:32:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:24.914621368 +0000 UTC m=+1156.190046911" watchObservedRunningTime="2025-06-06 09:32:24.941625549 +0000 UTC m=+1156.217051092" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.942349 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.945072 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.946216 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.966635 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.974360 4911 scope.go:117] "RemoveContainer" containerID="f22ac509253f2fc580f47a38b62dfa390bddb856f17dce57068a38f8f30ac1fa" Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.975190 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-58b6f8f48b-4n9zh"] Jun 06 09:32:24 crc kubenswrapper[4911]: I0606 09:32:24.982925 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-58b6f8f48b-4n9zh"] Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.000665 4911 scope.go:117] "RemoveContainer" containerID="48956eedc37ed9b718ba75a7fe9ac3bdaf030ebff4bac6f41e11e743071d3ba6" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.035416 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-run-httpd\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.035482 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-config-data\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.035507 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-scripts\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.035551 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.035654 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-log-httpd\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.035681 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwhmw\" (UniqueName: \"kubernetes.io/projected/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-kube-api-access-nwhmw\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.035746 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.041272 4911 scope.go:117] "RemoveContainer" containerID="7e9cf2d10460184da893979bc2c9b0439be07aa529d8fb6fe4706920d07685c7" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.094077 4911 scope.go:117] "RemoveContainer" containerID="e5b37d525ddcec754daf792e9f0afaaeb17fe744728feffb749677d43f2d9609" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.118075 4911 scope.go:117] "RemoveContainer" containerID="d56122564610d000fecbf3e19a2ad90f1d89a0bd6a45975fc3142fd705e352c9" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.137755 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.137875 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-run-httpd\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.137914 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-config-data\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.137932 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-scripts\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.137970 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.138028 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-log-httpd\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.138048 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwhmw\" (UniqueName: \"kubernetes.io/projected/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-kube-api-access-nwhmw\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.138434 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-run-httpd\") pod 
\"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.138645 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-log-httpd\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.144217 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.144764 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-config-data\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.151306 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-scripts\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.156162 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.168068 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwhmw\" (UniqueName: \"kubernetes.io/projected/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-kube-api-access-nwhmw\") pod \"ceilometer-0\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.279902 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.880998 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.960528 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80e5526c-70a7-44bb-9762-23caef81912d" path="/var/lib/kubelet/pods/80e5526c-70a7-44bb-9762-23caef81912d/volumes" Jun 06 09:32:25 crc kubenswrapper[4911]: I0606 09:32:25.965056 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de868b44-b17e-418a-9194-58d24fc9af42" path="/var/lib/kubelet/pods/de868b44-b17e-418a-9194-58d24fc9af42/volumes" Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.135619 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.186249 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.218642 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-volume-volume1-0"] Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.242888 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.256868 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.318486 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-backup-0"] Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.909357 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9cd217ab-c7bb-4850-9cce-be28e294667c" containerName="cinder-scheduler" containerID="cri-o://a896e44409219f1d9c63a72a06b4503a5a4dd13efb7a3e385afec726f6c1d256" gracePeriod=30 Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.909969 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-backup-0" podUID="f60f5eac-7f8c-4909-ae10-449d6e6df432" containerName="cinder-backup" containerID="cri-o://ad6bf48e7eaa52143e9c84d5f8eeb454e4462460800a93ad0c837d41cc2675c9" gracePeriod=30 Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.910071 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9cd217ab-c7bb-4850-9cce-be28e294667c" containerName="probe" containerID="cri-o://05e24bed45229dcc5ed1d86b595cd98de482685be1964564bd2063e3ded505f4" gracePeriod=30 Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.910166 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-backup-0" podUID="f60f5eac-7f8c-4909-ae10-449d6e6df432" containerName="probe" containerID="cri-o://ae93c1123ff5cc08f03fe458427bdc45141e8e2c0e6ce6afd19f8efd56eea572" gracePeriod=30 Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.910373 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-volume-volume1-0" podUID="695d811f-7aed-428d-b224-e6711323e54b" containerName="cinder-volume" containerID="cri-o://6c3c8c1ebfedecd7827ed1c762f828423cabcc481c7c87c0e69e87f5059d675a" gracePeriod=30 Jun 06 09:32:27 crc kubenswrapper[4911]: I0606 09:32:27.910489 4911 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openstack/cinder-volume-volume1-0" podUID="695d811f-7aed-428d-b224-e6711323e54b" containerName="probe" containerID="cri-o://9e37cbef049348eda0dd25ecd145fd0216187016081578e8bfa7bfb621329a92" gracePeriod=30 Jun 06 09:32:28 crc kubenswrapper[4911]: I0606 09:32:28.714511 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:32:28 crc kubenswrapper[4911]: I0606 09:32:28.922673 4911 generic.go:334] "Generic (PLEG): container finished" podID="695d811f-7aed-428d-b224-e6711323e54b" containerID="6c3c8c1ebfedecd7827ed1c762f828423cabcc481c7c87c0e69e87f5059d675a" exitCode=0 Jun 06 09:32:28 crc kubenswrapper[4911]: I0606 09:32:28.922862 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"695d811f-7aed-428d-b224-e6711323e54b","Type":"ContainerDied","Data":"6c3c8c1ebfedecd7827ed1c762f828423cabcc481c7c87c0e69e87f5059d675a"} Jun 06 09:32:28 crc kubenswrapper[4911]: I0606 09:32:28.926672 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"074afac4-64d1-4af0-a4fc-82e118a23756","Type":"ContainerStarted","Data":"f070e07587f58137b8549c14a8280b660a6be808a33526e89e3de3e0774bafeb"} Jun 06 09:32:28 crc kubenswrapper[4911]: I0606 09:32:28.930580 4911 generic.go:334] "Generic (PLEG): container finished" podID="9cd217ab-c7bb-4850-9cce-be28e294667c" containerID="05e24bed45229dcc5ed1d86b595cd98de482685be1964564bd2063e3ded505f4" exitCode=0 Jun 06 09:32:28 crc kubenswrapper[4911]: I0606 09:32:28.930632 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9cd217ab-c7bb-4850-9cce-be28e294667c","Type":"ContainerDied","Data":"05e24bed45229dcc5ed1d86b595cd98de482685be1964564bd2063e3ded505f4"} Jun 06 09:32:28 crc kubenswrapper[4911]: I0606 09:32:28.932243 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7","Type":"ContainerStarted","Data":"b73837d9cedf1ade100b0de24e2ffb29f8ab08690540ae9a1d7dcf87396eb76c"} Jun 06 09:32:29 crc kubenswrapper[4911]: I0606 09:32:29.943851 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"074afac4-64d1-4af0-a4fc-82e118a23756","Type":"ContainerStarted","Data":"fc7d80efb3e46dd01b815948021265703231304a9a6115bc5e0ef821692a19af"} Jun 06 09:32:29 crc kubenswrapper[4911]: I0606 09:32:29.945927 4911 generic.go:334] "Generic (PLEG): container finished" podID="9cd217ab-c7bb-4850-9cce-be28e294667c" containerID="a896e44409219f1d9c63a72a06b4503a5a4dd13efb7a3e385afec726f6c1d256" exitCode=0 Jun 06 09:32:29 crc kubenswrapper[4911]: I0606 09:32:29.945948 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9cd217ab-c7bb-4850-9cce-be28e294667c","Type":"ContainerDied","Data":"a896e44409219f1d9c63a72a06b4503a5a4dd13efb7a3e385afec726f6c1d256"} Jun 06 09:32:29 crc kubenswrapper[4911]: I0606 09:32:29.947909 4911 generic.go:334] "Generic (PLEG): container finished" podID="f60f5eac-7f8c-4909-ae10-449d6e6df432" containerID="ae93c1123ff5cc08f03fe458427bdc45141e8e2c0e6ce6afd19f8efd56eea572" exitCode=0 Jun 06 09:32:29 crc kubenswrapper[4911]: I0606 09:32:29.951205 4911 generic.go:334] "Generic (PLEG): container finished" podID="695d811f-7aed-428d-b224-e6711323e54b" containerID="9e37cbef049348eda0dd25ecd145fd0216187016081578e8bfa7bfb621329a92" exitCode=0 Jun 06 09:32:29 crc kubenswrapper[4911]: I0606 09:32:29.962486 4911 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"f60f5eac-7f8c-4909-ae10-449d6e6df432","Type":"ContainerDied","Data":"ae93c1123ff5cc08f03fe458427bdc45141e8e2c0e6ce6afd19f8efd56eea572"} Jun 06 09:32:29 crc kubenswrapper[4911]: I0606 09:32:29.962528 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7","Type":"ContainerStarted","Data":"c7727c01e2fcc2cbcea1cfa164f6f8f40cdb6d4f2f1d356d7ff5d342baa49b54"} Jun 06 09:32:29 crc kubenswrapper[4911]: I0606 09:32:29.962542 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"695d811f-7aed-428d-b224-e6711323e54b","Type":"ContainerDied","Data":"9e37cbef049348eda0dd25ecd145fd0216187016081578e8bfa7bfb621329a92"} Jun 06 09:32:29 crc kubenswrapper[4911]: I0606 09:32:29.980580 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=4.377194474 podStartE2EDuration="21.980555144s" podCreationTimestamp="2025-06-06 09:32:08 +0000 UTC" firstStartedPulling="2025-06-06 09:32:10.598390802 +0000 UTC m=+1141.873816345" lastFinishedPulling="2025-06-06 09:32:28.201751472 +0000 UTC m=+1159.477177015" observedRunningTime="2025-06-06 09:32:29.976530021 +0000 UTC m=+1161.251955584" watchObservedRunningTime="2025-06-06 09:32:29.980555144 +0000 UTC m=+1161.255980687" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.072441 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.151077 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data\") pod \"9cd217ab-c7bb-4850-9cce-be28e294667c\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.151244 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-combined-ca-bundle\") pod \"9cd217ab-c7bb-4850-9cce-be28e294667c\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.151284 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data-custom\") pod \"9cd217ab-c7bb-4850-9cce-be28e294667c\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.151599 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-scripts\") pod \"9cd217ab-c7bb-4850-9cce-be28e294667c\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.151755 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c422\" (UniqueName: \"kubernetes.io/projected/9cd217ab-c7bb-4850-9cce-be28e294667c-kube-api-access-2c422\") pod \"9cd217ab-c7bb-4850-9cce-be28e294667c\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.151891 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-machine-id\") pod \"9cd217ab-c7bb-4850-9cce-be28e294667c\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.151949 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-localtime\") pod \"9cd217ab-c7bb-4850-9cce-be28e294667c\" (UID: \"9cd217ab-c7bb-4850-9cce-be28e294667c\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.154159 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-localtime" (OuterVolumeSpecName: "etc-localtime") pod "9cd217ab-c7bb-4850-9cce-be28e294667c" (UID: "9cd217ab-c7bb-4850-9cce-be28e294667c"). InnerVolumeSpecName "etc-localtime". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.154223 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9cd217ab-c7bb-4850-9cce-be28e294667c" (UID: "9cd217ab-c7bb-4850-9cce-be28e294667c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.172359 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9cd217ab-c7bb-4850-9cce-be28e294667c" (UID: "9cd217ab-c7bb-4850-9cce-be28e294667c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.189300 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-scripts" (OuterVolumeSpecName: "scripts") pod "9cd217ab-c7bb-4850-9cce-be28e294667c" (UID: "9cd217ab-c7bb-4850-9cce-be28e294667c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.206476 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cd217ab-c7bb-4850-9cce-be28e294667c-kube-api-access-2c422" (OuterVolumeSpecName: "kube-api-access-2c422") pod "9cd217ab-c7bb-4850-9cce-be28e294667c" (UID: "9cd217ab-c7bb-4850-9cce-be28e294667c"). InnerVolumeSpecName "kube-api-access-2c422". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.265530 4911 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data-custom\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.265605 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.265620 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c422\" (UniqueName: \"kubernetes.io/projected/9cd217ab-c7bb-4850-9cce-be28e294667c-kube-api-access-2c422\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.265640 4911 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.265654 4911 reconciler_common.go:293] "Volume detached for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/9cd217ab-c7bb-4850-9cce-be28e294667c-etc-localtime\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.401678 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9cd217ab-c7bb-4850-9cce-be28e294667c" (UID: "9cd217ab-c7bb-4850-9cce-be28e294667c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.433150 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.471070 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.491262 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data" (OuterVolumeSpecName: "config-data") pod "9cd217ab-c7bb-4850-9cce-be28e294667c" (UID: "9cd217ab-c7bb-4850-9cce-be28e294667c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.571749 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data-custom\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.572325 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsnpb\" (UniqueName: \"kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-kube-api-access-jsnpb\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.572468 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.572581 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-ceph\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.572714 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-localtime\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.572821 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-localtime" (OuterVolumeSpecName: "etc-localtime") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "etc-localtime". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.572977 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-run" (OuterVolumeSpecName: "run") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.573148 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-run\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.573262 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-dev\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.573410 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-cinder\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.573507 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-lib-cinder\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.573590 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-scripts\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.573681 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-combined-ca-bundle\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.573839 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-nvme\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.573930 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-machine-id\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.574062 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-brick\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.574224 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-iscsi\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc 
kubenswrapper[4911]: I0606 09:32:30.574333 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-lib-modules\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.574442 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-sys\") pod \"695d811f-7aed-428d-b224-e6711323e54b\" (UID: \"695d811f-7aed-428d-b224-e6711323e54b\") " Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.575209 4911 reconciler_common.go:293] "Volume detached for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-localtime\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.575316 4911 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-run\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.577442 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cd217ab-c7bb-4850-9cce-be28e294667c-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.575504 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.575505 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-sys" (OuterVolumeSpecName: "sys") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.575534 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.575532 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.575552 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-cinder" (OuterVolumeSpecName: "var-locks-cinder") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "var-locks-cinder". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.575553 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.575571 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-dev" (OuterVolumeSpecName: "dev") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.575577 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-lib-cinder" (OuterVolumeSpecName: "var-lib-cinder") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "var-lib-cinder". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.575586 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.576996 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-kube-api-access-jsnpb" (OuterVolumeSpecName: "kube-api-access-jsnpb") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "kube-api-access-jsnpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.577442 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.579327 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-scripts" (OuterVolumeSpecName: "scripts") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.581371 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-ceph" (OuterVolumeSpecName: "ceph") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.656138 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679541 4911 reconciler_common.go:293] "Volume detached for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-cinder\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679573 4911 reconciler_common.go:293] "Volume detached for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-lib-cinder\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679585 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679603 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679618 4911 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-nvme\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679631 4911 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679641 4911 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-var-locks-brick\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679652 4911 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-etc-iscsi\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679663 4911 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-lib-modules\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679677 4911 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-sys\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679688 4911 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data-custom\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679700 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsnpb\" 
(UniqueName: \"kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-kube-api-access-jsnpb\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679711 4911 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/695d811f-7aed-428d-b224-e6711323e54b-ceph\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.679721 4911 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/695d811f-7aed-428d-b224-e6711323e54b-dev\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.707798 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data" (OuterVolumeSpecName: "config-data") pod "695d811f-7aed-428d-b224-e6711323e54b" (UID: "695d811f-7aed-428d-b224-e6711323e54b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.781848 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/695d811f-7aed-428d-b224-e6711323e54b-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.962654 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"695d811f-7aed-428d-b224-e6711323e54b","Type":"ContainerDied","Data":"bea911b5f4bf9f55bf02d91e9302f03b954adce42a2ec88b2a2950f4703c14e6"} Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.962669 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.963854 4911 scope.go:117] "RemoveContainer" containerID="9e37cbef049348eda0dd25ecd145fd0216187016081578e8bfa7bfb621329a92" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.965049 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9cd217ab-c7bb-4850-9cce-be28e294667c","Type":"ContainerDied","Data":"735a3708ec2d2f875682741bc3e6dba6df6e82dd68a14d29734f10afc0c9a280"} Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.965083 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jun 06 09:32:30 crc kubenswrapper[4911]: I0606 09:32:30.967032 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7","Type":"ContainerStarted","Data":"3dd4662179adf4c8b70a0f3572c715c3e21b380a3394d8803d67149214cef055"} Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.009125 4911 scope.go:117] "RemoveContainer" containerID="6c3c8c1ebfedecd7827ed1c762f828423cabcc481c7c87c0e69e87f5059d675a" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.013728 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.069935 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.091990 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-volume-volume1-0"] Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.092487 4911 scope.go:117] "RemoveContainer" containerID="05e24bed45229dcc5ed1d86b595cd98de482685be1964564bd2063e3ded505f4" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.110499 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jun 06 09:32:31 crc kubenswrapper[4911]: E0606 09:32:31.111747 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cd217ab-c7bb-4850-9cce-be28e294667c" containerName="cinder-scheduler" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.111807 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cd217ab-c7bb-4850-9cce-be28e294667c" containerName="cinder-scheduler" Jun 06 09:32:31 crc kubenswrapper[4911]: E0606 09:32:31.111831 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="695d811f-7aed-428d-b224-e6711323e54b" containerName="cinder-volume" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.111841 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="695d811f-7aed-428d-b224-e6711323e54b" containerName="cinder-volume" Jun 06 09:32:31 crc kubenswrapper[4911]: E0606 09:32:31.111851 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cd217ab-c7bb-4850-9cce-be28e294667c" containerName="probe" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.111861 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cd217ab-c7bb-4850-9cce-be28e294667c" containerName="probe" Jun 06 09:32:31 crc kubenswrapper[4911]: E0606 09:32:31.111891 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="695d811f-7aed-428d-b224-e6711323e54b" containerName="probe" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.111899 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="695d811f-7aed-428d-b224-e6711323e54b" containerName="probe" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.112772 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cd217ab-c7bb-4850-9cce-be28e294667c" containerName="cinder-scheduler" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.112851 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cd217ab-c7bb-4850-9cce-be28e294667c" containerName="probe" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.112866 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="695d811f-7aed-428d-b224-e6711323e54b" containerName="probe" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.112879 4911 
memory_manager.go:354] "RemoveStaleState removing state" podUID="695d811f-7aed-428d-b224-e6711323e54b" containerName="cinder-volume" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.114025 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.119060 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.122957 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-volume-volume1-0"] Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.140399 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.153251 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.155958 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.158622 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.169612 4911 scope.go:117] "RemoveContainer" containerID="a896e44409219f1d9c63a72a06b4503a5a4dd13efb7a3e385afec726f6c1d256" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.169838 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.190793 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-etc-localtime\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.190867 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.190895 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.191245 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-config-data\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.191321 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-scripts\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " 
pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.191598 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6lhj\" (UniqueName: \"kubernetes.io/projected/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-kube-api-access-q6lhj\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.191659 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.294492 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295076 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6lhj\" (UniqueName: \"kubernetes.io/projected/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-kube-api-access-q6lhj\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295266 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295354 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295381 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295414 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-localtime\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295465 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " 
pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295508 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-etc-localtime\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295535 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-sys\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295559 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295583 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-dev\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295629 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-etc-localtime\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295646 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.295924 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296110 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296147 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296189 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296281 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4f4fe571-77a4-4d40-843e-f5ed6091158e-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296360 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296389 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sl7ns\" (UniqueName: \"kubernetes.io/projected/4f4fe571-77a4-4d40-843e-f5ed6091158e-kube-api-access-sl7ns\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296410 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-run\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296440 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-config-data\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296465 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296491 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-scripts\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296506 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.296598 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.300957 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-scripts\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.301120 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.304350 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-config-data\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.306114 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.318428 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6lhj\" (UniqueName: \"kubernetes.io/projected/a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a-kube-api-access-q6lhj\") pod \"cinder-scheduler-0\" (UID: \"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a\") " pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398480 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398552 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398588 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-localtime\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398616 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc 
kubenswrapper[4911]: I0606 09:32:31.398651 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-sys\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398674 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398686 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398726 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-sys\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398699 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-dev\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398744 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-dev\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398693 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-localtime\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398782 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398804 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398879 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: 
\"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398926 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.398962 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399018 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4f4fe571-77a4-4d40-843e-f5ed6091158e-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399059 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399067 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399118 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sl7ns\" (UniqueName: \"kubernetes.io/projected/4f4fe571-77a4-4d40-843e-f5ed6091158e-kube-api-access-sl7ns\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399414 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-run\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399138 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399454 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399599 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"lib-modules\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399611 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399604 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-run\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399657 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4f4fe571-77a4-4d40-843e-f5ed6091158e-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.399796 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.403460 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.403642 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.403920 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4f4fe571-77a4-4d40-843e-f5ed6091158e-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.404548 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.405743 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f4fe571-77a4-4d40-843e-f5ed6091158e-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc 
kubenswrapper[4911]: I0606 09:32:31.420833 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sl7ns\" (UniqueName: \"kubernetes.io/projected/4f4fe571-77a4-4d40-843e-f5ed6091158e-kube-api-access-sl7ns\") pod \"cinder-volume-volume1-0\" (UID: \"4f4fe571-77a4-4d40-843e-f5ed6091158e\") " pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.468061 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.478215 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.661890 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.740030 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.963505 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="695d811f-7aed-428d-b224-e6711323e54b" path="/var/lib/kubelet/pods/695d811f-7aed-428d-b224-e6711323e54b/volumes" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.964152 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cd217ab-c7bb-4850-9cce-be28e294667c" path="/var/lib/kubelet/pods/9cd217ab-c7bb-4850-9cce-be28e294667c/volumes" Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.988503 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7","Type":"ContainerStarted","Data":"073e38b9a7e8987546b532264cde763b88a042bd531364f08f0dfb36f1757b6e"} Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.998783 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="31189f8d-bd76-4b64-84d2-193881b6b47a" containerName="manila-scheduler" containerID="cri-o://389a8d1ce8531cd3a04ebb23f6d4d12dd12da9be2a643fce93da2c248b8dbe1e" gracePeriod=30 Jun 06 09:32:31 crc kubenswrapper[4911]: I0606 09:32:31.999427 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="31189f8d-bd76-4b64-84d2-193881b6b47a" containerName="probe" containerID="cri-o://e59874f9b16a48d6ffb8e9ec63945e74d9de29139d79c8eef773dd6a8df0c8c1" gracePeriod=30 Jun 06 09:32:32 crc kubenswrapper[4911]: I0606 09:32:32.010840 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jun 06 09:32:32 crc kubenswrapper[4911]: I0606 09:32:32.323700 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Jun 06 09:32:32 crc kubenswrapper[4911]: W0606 09:32:32.334132 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f4fe571_77a4_4d40_843e_f5ed6091158e.slice/crio-f4d5a2735734891cd89b413a4bf15f7ca7c12332053e50fb0c6aa3b5f96eb2f9 WatchSource:0}: Error finding container f4d5a2735734891cd89b413a4bf15f7ca7c12332053e50fb0c6aa3b5f96eb2f9: Status 404 returned error can't find the container with id f4d5a2735734891cd89b413a4bf15f7ca7c12332053e50fb0c6aa3b5f96eb2f9 Jun 06 09:32:33 crc kubenswrapper[4911]: I0606 09:32:33.008530 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" 
event={"ID":"4f4fe571-77a4-4d40-843e-f5ed6091158e","Type":"ContainerStarted","Data":"f4d5a2735734891cd89b413a4bf15f7ca7c12332053e50fb0c6aa3b5f96eb2f9"} Jun 06 09:32:33 crc kubenswrapper[4911]: I0606 09:32:33.009821 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a","Type":"ContainerStarted","Data":"f1b7828054392e571dfacbe3a7e7b813a7aaf5d8fb5da5b26b6fb17532c668a8"} Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.052422 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7","Type":"ContainerStarted","Data":"74622eae93c5196a3c70307c4d89a71379eae1a53a92bc12e388051584ff9f3b"} Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.054220 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.066403 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a","Type":"ContainerStarted","Data":"59df675cfdeeac4ea1fed47854be041a70af32c944cbfd1d98df989cdae11bfe"} Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.077009 4911 generic.go:334] "Generic (PLEG): container finished" podID="31189f8d-bd76-4b64-84d2-193881b6b47a" containerID="e59874f9b16a48d6ffb8e9ec63945e74d9de29139d79c8eef773dd6a8df0c8c1" exitCode=0 Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.077039 4911 generic.go:334] "Generic (PLEG): container finished" podID="31189f8d-bd76-4b64-84d2-193881b6b47a" containerID="389a8d1ce8531cd3a04ebb23f6d4d12dd12da9be2a643fce93da2c248b8dbe1e" exitCode=0 Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.077087 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"31189f8d-bd76-4b64-84d2-193881b6b47a","Type":"ContainerDied","Data":"e59874f9b16a48d6ffb8e9ec63945e74d9de29139d79c8eef773dd6a8df0c8c1"} Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.077141 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"31189f8d-bd76-4b64-84d2-193881b6b47a","Type":"ContainerDied","Data":"389a8d1ce8531cd3a04ebb23f6d4d12dd12da9be2a643fce93da2c248b8dbe1e"} Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.092698 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=5.426075614 podStartE2EDuration="10.09266025s" podCreationTimestamp="2025-06-06 09:32:24 +0000 UTC" firstStartedPulling="2025-06-06 09:32:28.738702883 +0000 UTC m=+1160.014128426" lastFinishedPulling="2025-06-06 09:32:33.405287509 +0000 UTC m=+1164.680713062" observedRunningTime="2025-06-06 09:32:34.077123403 +0000 UTC m=+1165.352548946" watchObservedRunningTime="2025-06-06 09:32:34.09266025 +0000 UTC m=+1165.368085793" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.095374 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"4f4fe571-77a4-4d40-843e-f5ed6091158e","Type":"ContainerStarted","Data":"ceddd7ae59894dc092cf8c7859dd4822ad777dfd0f07fa849e25d758bbdc3ae6"} Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.095423 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" 
event={"ID":"4f4fe571-77a4-4d40-843e-f5ed6091158e","Type":"ContainerStarted","Data":"4eb67c210b2ebfbb3bf641793dfe399acf47744b0634f3ed81ddda544838716c"} Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.101777 4911 generic.go:334] "Generic (PLEG): container finished" podID="f60f5eac-7f8c-4909-ae10-449d6e6df432" containerID="ad6bf48e7eaa52143e9c84d5f8eeb454e4462460800a93ad0c837d41cc2675c9" exitCode=0 Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.102017 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"f60f5eac-7f8c-4909-ae10-449d6e6df432","Type":"ContainerDied","Data":"ad6bf48e7eaa52143e9c84d5f8eeb454e4462460800a93ad0c837d41cc2675c9"} Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.135399 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=3.135379554 podStartE2EDuration="3.135379554s" podCreationTimestamp="2025-06-06 09:32:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:34.128717543 +0000 UTC m=+1165.404143106" watchObservedRunningTime="2025-06-06 09:32:34.135379554 +0000 UTC m=+1165.410805097" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.515524 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.700953 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-nvme\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701308 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-lib-cinder\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701350 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlgvc\" (UniqueName: \"kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-kube-api-access-nlgvc\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701369 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-run\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701417 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-machine-id\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701445 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 
09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701480 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-iscsi\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701517 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-dev\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701553 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-lib-modules\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701570 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-scripts\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701641 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-sys\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701656 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-ceph\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701680 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data-custom\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701705 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-localtime\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701760 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-cinder\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701795 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-combined-ca-bundle\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701825 4911 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-brick\") pod \"f60f5eac-7f8c-4909-ae10-449d6e6df432\" (UID: \"f60f5eac-7f8c-4909-ae10-449d6e6df432\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.701253 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.702284 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.702342 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-sys" (OuterVolumeSpecName: "sys") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.702359 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-lib-cinder" (OuterVolumeSpecName: "var-lib-cinder") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "var-lib-cinder". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.702401 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.702429 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-dev" (OuterVolumeSpecName: "dev") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.702455 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "lib-modules". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.702764 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-run" (OuterVolumeSpecName: "run") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.702806 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.702832 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-localtime" (OuterVolumeSpecName: "etc-localtime") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "etc-localtime". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.702968 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-cinder" (OuterVolumeSpecName: "var-locks-cinder") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "var-locks-cinder". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.710743 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-scripts" (OuterVolumeSpecName: "scripts") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.718026 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.718030 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-kube-api-access-nlgvc" (OuterVolumeSpecName: "kube-api-access-nlgvc") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "kube-api-access-nlgvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.718256 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-ceph" (OuterVolumeSpecName: "ceph") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.799530 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.818779 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819190 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819229 4911 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-brick\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819239 4911 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-nvme\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819247 4911 reconciler_common.go:293] "Volume detached for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-lib-cinder\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819257 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlgvc\" (UniqueName: \"kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-kube-api-access-nlgvc\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819268 4911 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-run\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819277 4911 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819286 4911 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-iscsi\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819296 4911 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-dev\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819304 4911 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-lib-modules\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819313 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-scripts\") on node \"crc\" 
DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819323 4911 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-sys\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819331 4911 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f60f5eac-7f8c-4909-ae10-449d6e6df432-ceph\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819339 4911 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data-custom\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819348 4911 reconciler_common.go:293] "Volume detached for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-etc-localtime\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.819355 4911 reconciler_common.go:293] "Volume detached for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/f60f5eac-7f8c-4909-ae10-449d6e6df432-var-locks-cinder\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.920122 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-combined-ca-bundle\") pod \"31189f8d-bd76-4b64-84d2-193881b6b47a\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.920206 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-scripts\") pod \"31189f8d-bd76-4b64-84d2-193881b6b47a\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.920229 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-machine-id\") pod \"31189f8d-bd76-4b64-84d2-193881b6b47a\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.920253 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data\") pod \"31189f8d-bd76-4b64-84d2-193881b6b47a\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.920284 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9c99n\" (UniqueName: \"kubernetes.io/projected/31189f8d-bd76-4b64-84d2-193881b6b47a-kube-api-access-9c99n\") pod \"31189f8d-bd76-4b64-84d2-193881b6b47a\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.920307 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-localtime\") pod \"31189f8d-bd76-4b64-84d2-193881b6b47a\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.920353 4911 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data-custom\") pod \"31189f8d-bd76-4b64-84d2-193881b6b47a\" (UID: \"31189f8d-bd76-4b64-84d2-193881b6b47a\") " Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.921161 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "31189f8d-bd76-4b64-84d2-193881b6b47a" (UID: "31189f8d-bd76-4b64-84d2-193881b6b47a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.921793 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-localtime" (OuterVolumeSpecName: "etc-localtime") pod "31189f8d-bd76-4b64-84d2-193881b6b47a" (UID: "31189f8d-bd76-4b64-84d2-193881b6b47a"). InnerVolumeSpecName "etc-localtime". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.927259 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data" (OuterVolumeSpecName: "config-data") pod "f60f5eac-7f8c-4909-ae10-449d6e6df432" (UID: "f60f5eac-7f8c-4909-ae10-449d6e6df432"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.929195 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "31189f8d-bd76-4b64-84d2-193881b6b47a" (UID: "31189f8d-bd76-4b64-84d2-193881b6b47a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.929332 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-scripts" (OuterVolumeSpecName: "scripts") pod "31189f8d-bd76-4b64-84d2-193881b6b47a" (UID: "31189f8d-bd76-4b64-84d2-193881b6b47a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.931340 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31189f8d-bd76-4b64-84d2-193881b6b47a-kube-api-access-9c99n" (OuterVolumeSpecName: "kube-api-access-9c99n") pod "31189f8d-bd76-4b64-84d2-193881b6b47a" (UID: "31189f8d-bd76-4b64-84d2-193881b6b47a"). InnerVolumeSpecName "kube-api-access-9c99n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:34 crc kubenswrapper[4911]: I0606 09:32:34.994337 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31189f8d-bd76-4b64-84d2-193881b6b47a" (UID: "31189f8d-bd76-4b64-84d2-193881b6b47a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.022947 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9c99n\" (UniqueName: \"kubernetes.io/projected/31189f8d-bd76-4b64-84d2-193881b6b47a-kube-api-access-9c99n\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.023004 4911 reconciler_common.go:293] "Volume detached for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-localtime\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.023017 4911 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data-custom\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.023028 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f60f5eac-7f8c-4909-ae10-449d6e6df432-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.023271 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.024213 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.024239 4911 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/31189f8d-bd76-4b64-84d2-193881b6b47a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.077474 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data" (OuterVolumeSpecName: "config-data") pod "31189f8d-bd76-4b64-84d2-193881b6b47a" (UID: "31189f8d-bd76-4b64-84d2-193881b6b47a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.113148 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"f60f5eac-7f8c-4909-ae10-449d6e6df432","Type":"ContainerDied","Data":"48e7076c053c889bee684076408ceb20232c4c7edfd074615be9282fa137e2a7"} Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.113200 4911 scope.go:117] "RemoveContainer" containerID="ae93c1123ff5cc08f03fe458427bdc45141e8e2c0e6ce6afd19f8efd56eea572" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.113337 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.119469 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a","Type":"ContainerStarted","Data":"4faeabab782e4f763b5a2707d205d56decb4a85a9222fb7681757b8cc7ae0c7e"} Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.125272 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.125502 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"31189f8d-bd76-4b64-84d2-193881b6b47a","Type":"ContainerDied","Data":"79d7806c90c2a35d7dedfaca2c51a668cef002f327543a456b149cc4db6f08cd"} Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.127041 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31189f8d-bd76-4b64-84d2-193881b6b47a-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.129661 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.149793 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.149769754 podStartE2EDuration="4.149769754s" podCreationTimestamp="2025-06-06 09:32:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:35.144351425 +0000 UTC m=+1166.419776988" watchObservedRunningTime="2025-06-06 09:32:35.149769754 +0000 UTC m=+1166.425195297" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.184155 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-backup-0"] Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.194080 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-backup-0"] Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.244261 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Jun 06 09:32:35 crc kubenswrapper[4911]: E0606 09:32:35.244781 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31189f8d-bd76-4b64-84d2-193881b6b47a" containerName="manila-scheduler" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.244821 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="31189f8d-bd76-4b64-84d2-193881b6b47a" containerName="manila-scheduler" Jun 06 09:32:35 crc kubenswrapper[4911]: E0606 09:32:35.244847 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31189f8d-bd76-4b64-84d2-193881b6b47a" containerName="probe" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.244854 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="31189f8d-bd76-4b64-84d2-193881b6b47a" containerName="probe" Jun 06 09:32:35 crc kubenswrapper[4911]: E0606 09:32:35.244891 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f60f5eac-7f8c-4909-ae10-449d6e6df432" containerName="probe" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.244901 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f60f5eac-7f8c-4909-ae10-449d6e6df432" containerName="probe" Jun 06 09:32:35 crc kubenswrapper[4911]: E0606 09:32:35.244926 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f60f5eac-7f8c-4909-ae10-449d6e6df432" containerName="cinder-backup" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.244931 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f60f5eac-7f8c-4909-ae10-449d6e6df432" containerName="cinder-backup" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.249241 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f60f5eac-7f8c-4909-ae10-449d6e6df432" containerName="probe" Jun 06 09:32:35 
crc kubenswrapper[4911]: I0606 09:32:35.249288 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f60f5eac-7f8c-4909-ae10-449d6e6df432" containerName="cinder-backup" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.249332 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="31189f8d-bd76-4b64-84d2-193881b6b47a" containerName="probe" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.249365 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="31189f8d-bd76-4b64-84d2-193881b6b47a" containerName="manila-scheduler" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.250764 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.255764 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.309168 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.320178 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.322891 4911 scope.go:117] "RemoveContainer" containerID="ad6bf48e7eaa52143e9c84d5f8eeb454e4462460800a93ad0c837d41cc2675c9" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.332367 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-scheduler-0"] Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.348211 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.350259 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.354240 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.354424 4911 scope.go:117] "RemoveContainer" containerID="e59874f9b16a48d6ffb8e9ec63945e74d9de29139d79c8eef773dd6a8df0c8c1" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.384530 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.389768 4911 scope.go:117] "RemoveContainer" containerID="389a8d1ce8531cd3a04ebb23f6d4d12dd12da9be2a643fce93da2c248b8dbe1e" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434412 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-ceph\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434469 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-nvme\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434505 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434538 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spbns\" (UniqueName: \"kubernetes.io/projected/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-kube-api-access-spbns\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434562 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-localtime\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434577 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-run\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434591 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-sys\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434626 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434644 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-lib-modules\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434682 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434698 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434715 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-dev\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434746 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434770 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434799 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-config-data\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434818 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-scripts\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.434838 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-config-data-custom\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " 
pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537043 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537086 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537129 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-dev\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537171 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537183 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537214 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-config-data\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537244 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-dev\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537250 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/16bc7add-06e1-4709-88f7-19e18988ee26-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537281 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537282 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-var-lib-cinder\") pod \"cinder-backup-0\" (UID: 
\"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537341 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-config-data\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537367 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-scripts\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537387 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-config-data-custom\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537408 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537484 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7bps\" (UniqueName: \"kubernetes.io/projected/16bc7add-06e1-4709-88f7-19e18988ee26-kube-api-access-h7bps\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537520 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-ceph\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537544 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-nvme\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537574 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537611 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spbns\" (UniqueName: \"kubernetes.io/projected/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-kube-api-access-spbns\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537636 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537657 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-localtime\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537675 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-run\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537693 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/16bc7add-06e1-4709-88f7-19e18988ee26-etc-localtime\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537710 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-sys\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537735 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537757 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-scripts\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537795 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537822 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-lib-modules\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537896 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-lib-modules\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.537914 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.538592 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-nvme\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.539206 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-run\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.539320 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.539715 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-sys\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.539980 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-etc-localtime\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.549988 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-ceph\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.550506 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-config-data-custom\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.550687 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.555705 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-scripts\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.559854 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-spbns\" (UniqueName: \"kubernetes.io/projected/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-kube-api-access-spbns\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.560836 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546d4fc3-dc24-45d0-b6dd-9237e2e648fd-config-data\") pod \"cinder-backup-0\" (UID: \"546d4fc3-dc24-45d0-b6dd-9237e2e648fd\") " pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.604837 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.639489 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/16bc7add-06e1-4709-88f7-19e18988ee26-etc-localtime\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.639559 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.639589 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-scripts\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.639652 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/16bc7add-06e1-4709-88f7-19e18988ee26-etc-localtime\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.639689 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-config-data\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.639751 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/16bc7add-06e1-4709-88f7-19e18988ee26-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.639884 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7bps\" (UniqueName: \"kubernetes.io/projected/16bc7add-06e1-4709-88f7-19e18988ee26-kube-api-access-h7bps\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.640054 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.640198 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/16bc7add-06e1-4709-88f7-19e18988ee26-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.646255 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-scripts\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.648660 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.651753 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-config-data\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.663575 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7bps\" (UniqueName: \"kubernetes.io/projected/16bc7add-06e1-4709-88f7-19e18988ee26-kube-api-access-h7bps\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.673824 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16bc7add-06e1-4709-88f7-19e18988ee26-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"16bc7add-06e1-4709-88f7-19e18988ee26\") " pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.679011 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.965505 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31189f8d-bd76-4b64-84d2-193881b6b47a" path="/var/lib/kubelet/pods/31189f8d-bd76-4b64-84d2-193881b6b47a/volumes" Jun 06 09:32:35 crc kubenswrapper[4911]: I0606 09:32:35.966595 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f60f5eac-7f8c-4909-ae10-449d6e6df432" path="/var/lib/kubelet/pods/f60f5eac-7f8c-4909-ae10-449d6e6df432/volumes" Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.365678 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.442652 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Jun 06 09:32:36 crc kubenswrapper[4911]: W0606 09:32:36.461232 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16bc7add_06e1_4709_88f7_19e18988ee26.slice/crio-fed51348553859df147bd99238644d411c851b9c4aa7670a9e11c573dddf88c5 WatchSource:0}: Error finding container fed51348553859df147bd99238644d411c851b9c4aa7670a9e11c573dddf88c5: Status 404 returned error can't find the container with id fed51348553859df147bd99238644d411c851b9c4aa7670a9e11c573dddf88c5 Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.469230 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.478769 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.739000 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-g64qx"] Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.740293 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-g64qx" Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.753389 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-g64qx"] Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.839410 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-56x7b"] Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.841048 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-56x7b" Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.850521 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-56x7b"] Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.874654 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2z4w\" (UniqueName: \"kubernetes.io/projected/a1bfa29c-daf6-4b0c-89e6-3704863677e3-kube-api-access-l2z4w\") pod \"nova-api-db-create-g64qx\" (UID: \"a1bfa29c-daf6-4b0c-89e6-3704863677e3\") " pod="openstack/nova-api-db-create-g64qx" Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.977625 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2z4w\" (UniqueName: \"kubernetes.io/projected/a1bfa29c-daf6-4b0c-89e6-3704863677e3-kube-api-access-l2z4w\") pod \"nova-api-db-create-g64qx\" (UID: \"a1bfa29c-daf6-4b0c-89e6-3704863677e3\") " pod="openstack/nova-api-db-create-g64qx" Jun 06 09:32:36 crc kubenswrapper[4911]: I0606 09:32:36.978069 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcx72\" (UniqueName: \"kubernetes.io/projected/b0c460c5-5199-4a41-afb5-58b1bcbe0fa7-kube-api-access-xcx72\") pod \"nova-cell0-db-create-56x7b\" (UID: \"b0c460c5-5199-4a41-afb5-58b1bcbe0fa7\") " pod="openstack/nova-cell0-db-create-56x7b" Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.054996 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2z4w\" (UniqueName: \"kubernetes.io/projected/a1bfa29c-daf6-4b0c-89e6-3704863677e3-kube-api-access-l2z4w\") pod \"nova-api-db-create-g64qx\" (UID: \"a1bfa29c-daf6-4b0c-89e6-3704863677e3\") " pod="openstack/nova-api-db-create-g64qx" Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.077545 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-g64qx" Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.085034 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcx72\" (UniqueName: \"kubernetes.io/projected/b0c460c5-5199-4a41-afb5-58b1bcbe0fa7-kube-api-access-xcx72\") pod \"nova-cell0-db-create-56x7b\" (UID: \"b0c460c5-5199-4a41-afb5-58b1bcbe0fa7\") " pod="openstack/nova-cell0-db-create-56x7b" Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.103215 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-v6s7j"] Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.104722 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-v6s7j" Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.114980 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-v6s7j"] Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.129008 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcx72\" (UniqueName: \"kubernetes.io/projected/b0c460c5-5199-4a41-afb5-58b1bcbe0fa7-kube-api-access-xcx72\") pod \"nova-cell0-db-create-56x7b\" (UID: \"b0c460c5-5199-4a41-afb5-58b1bcbe0fa7\") " pod="openstack/nova-cell0-db-create-56x7b" Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.170037 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-56x7b" Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.191360 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52j7b\" (UniqueName: \"kubernetes.io/projected/1528dab4-6c33-4923-8c32-9c2b39aee053-kube-api-access-52j7b\") pod \"nova-cell1-db-create-v6s7j\" (UID: \"1528dab4-6c33-4923-8c32-9c2b39aee053\") " pod="openstack/nova-cell1-db-create-v6s7j" Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.201549 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"546d4fc3-dc24-45d0-b6dd-9237e2e648fd","Type":"ContainerStarted","Data":"2e575dee086b42f79a90e9b99fe157f8d948ee273dc09a9f8dc63547ecb60d08"} Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.201610 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"546d4fc3-dc24-45d0-b6dd-9237e2e648fd","Type":"ContainerStarted","Data":"aeb060831f7f3fe024ea7798a914bb7554e7bb3ca0ae42fb0275b80e6a2b9d15"} Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.209779 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"16bc7add-06e1-4709-88f7-19e18988ee26","Type":"ContainerStarted","Data":"fed51348553859df147bd99238644d411c851b9c4aa7670a9e11c573dddf88c5"} Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.293819 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52j7b\" (UniqueName: \"kubernetes.io/projected/1528dab4-6c33-4923-8c32-9c2b39aee053-kube-api-access-52j7b\") pod \"nova-cell1-db-create-v6s7j\" (UID: \"1528dab4-6c33-4923-8c32-9c2b39aee053\") " pod="openstack/nova-cell1-db-create-v6s7j" Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.326074 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52j7b\" (UniqueName: \"kubernetes.io/projected/1528dab4-6c33-4923-8c32-9c2b39aee053-kube-api-access-52j7b\") pod \"nova-cell1-db-create-v6s7j\" (UID: \"1528dab4-6c33-4923-8c32-9c2b39aee053\") " pod="openstack/nova-cell1-db-create-v6s7j" Jun 06 09:32:37 crc kubenswrapper[4911]: I0606 09:32:37.426453 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-v6s7j" Jun 06 09:32:38 crc kubenswrapper[4911]: I0606 09:32:38.033942 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-g64qx"] Jun 06 09:32:38 crc kubenswrapper[4911]: W0606 09:32:38.045242 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1bfa29c_daf6_4b0c_89e6_3704863677e3.slice/crio-38f032850f0f91320a43a64ee0305871a3de53151fb1f8880d60249441bf190c WatchSource:0}: Error finding container 38f032850f0f91320a43a64ee0305871a3de53151fb1f8880d60249441bf190c: Status 404 returned error can't find the container with id 38f032850f0f91320a43a64ee0305871a3de53151fb1f8880d60249441bf190c Jun 06 09:32:38 crc kubenswrapper[4911]: I0606 09:32:38.095166 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-56x7b"] Jun 06 09:32:38 crc kubenswrapper[4911]: W0606 09:32:38.098471 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0c460c5_5199_4a41_afb5_58b1bcbe0fa7.slice/crio-9b728612a254f4520b2d3e08a67a0ed785347d3b583f98b1c7de17410e0d9adc WatchSource:0}: Error finding container 9b728612a254f4520b2d3e08a67a0ed785347d3b583f98b1c7de17410e0d9adc: Status 404 returned error can't find the container with id 9b728612a254f4520b2d3e08a67a0ed785347d3b583f98b1c7de17410e0d9adc Jun 06 09:32:38 crc kubenswrapper[4911]: I0606 09:32:38.227910 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-g64qx" event={"ID":"a1bfa29c-daf6-4b0c-89e6-3704863677e3","Type":"ContainerStarted","Data":"38f032850f0f91320a43a64ee0305871a3de53151fb1f8880d60249441bf190c"} Jun 06 09:32:38 crc kubenswrapper[4911]: I0606 09:32:38.234275 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-56x7b" event={"ID":"b0c460c5-5199-4a41-afb5-58b1bcbe0fa7","Type":"ContainerStarted","Data":"9b728612a254f4520b2d3e08a67a0ed785347d3b583f98b1c7de17410e0d9adc"} Jun 06 09:32:38 crc kubenswrapper[4911]: I0606 09:32:38.244950 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"16bc7add-06e1-4709-88f7-19e18988ee26","Type":"ContainerStarted","Data":"57e5bbecff3e2ff9486345eb048805008360a61d831f624f7468b48013de98c0"} Jun 06 09:32:38 crc kubenswrapper[4911]: I0606 09:32:38.245009 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"16bc7add-06e1-4709-88f7-19e18988ee26","Type":"ContainerStarted","Data":"4c907d5b33d3414eea29f26928cbfe7b1607d619ea58469ae849d3fddf58dcf1"} Jun 06 09:32:38 crc kubenswrapper[4911]: I0606 09:32:38.259282 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"546d4fc3-dc24-45d0-b6dd-9237e2e648fd","Type":"ContainerStarted","Data":"c93a52c05ccfaf724671f5683aa1cd424e7bb1ff8abdfd7418d92a885634fa17"} Jun 06 09:32:38 crc kubenswrapper[4911]: I0606 09:32:38.291476 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-v6s7j"] Jun 06 09:32:38 crc kubenswrapper[4911]: I0606 09:32:38.302938 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.302913648 podStartE2EDuration="3.302913648s" podCreationTimestamp="2025-06-06 09:32:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-06-06 09:32:38.282174547 +0000 UTC m=+1169.557600100" watchObservedRunningTime="2025-06-06 09:32:38.302913648 +0000 UTC m=+1169.578339191" Jun 06 09:32:38 crc kubenswrapper[4911]: I0606 09:32:38.315001 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.314982847 podStartE2EDuration="3.314982847s" podCreationTimestamp="2025-06-06 09:32:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:38.31197744 +0000 UTC m=+1169.587402993" watchObservedRunningTime="2025-06-06 09:32:38.314982847 +0000 UTC m=+1169.590408390" Jun 06 09:32:39 crc kubenswrapper[4911]: I0606 09:32:39.270806 4911 generic.go:334] "Generic (PLEG): container finished" podID="1528dab4-6c33-4923-8c32-9c2b39aee053" containerID="c0a19fffd24d50c5bc7253901921e07792a3c62f8221bf1fc754d6223aa849c7" exitCode=0 Jun 06 09:32:39 crc kubenswrapper[4911]: I0606 09:32:39.270937 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-v6s7j" event={"ID":"1528dab4-6c33-4923-8c32-9c2b39aee053","Type":"ContainerDied","Data":"c0a19fffd24d50c5bc7253901921e07792a3c62f8221bf1fc754d6223aa849c7"} Jun 06 09:32:39 crc kubenswrapper[4911]: I0606 09:32:39.271403 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-v6s7j" event={"ID":"1528dab4-6c33-4923-8c32-9c2b39aee053","Type":"ContainerStarted","Data":"3802ca2aecbf9a3bd6fa37dae7d9407038357e987bb9390db66ad4872aed9fde"} Jun 06 09:32:39 crc kubenswrapper[4911]: I0606 09:32:39.272750 4911 generic.go:334] "Generic (PLEG): container finished" podID="a1bfa29c-daf6-4b0c-89e6-3704863677e3" containerID="d29315941a7e83f8341dbf443ef1920fdf3141d7aff95e5c066114e936667348" exitCode=0 Jun 06 09:32:39 crc kubenswrapper[4911]: I0606 09:32:39.272841 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-g64qx" event={"ID":"a1bfa29c-daf6-4b0c-89e6-3704863677e3","Type":"ContainerDied","Data":"d29315941a7e83f8341dbf443ef1920fdf3141d7aff95e5c066114e936667348"} Jun 06 09:32:39 crc kubenswrapper[4911]: I0606 09:32:39.274038 4911 generic.go:334] "Generic (PLEG): container finished" podID="b0c460c5-5199-4a41-afb5-58b1bcbe0fa7" containerID="90aa699cc169549dcdc6a4e779dab49b54ed20ca772e353a836a4794accab471" exitCode=0 Jun 06 09:32:39 crc kubenswrapper[4911]: I0606 09:32:39.274136 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-56x7b" event={"ID":"b0c460c5-5199-4a41-afb5-58b1bcbe0fa7","Type":"ContainerDied","Data":"90aa699cc169549dcdc6a4e779dab49b54ed20ca772e353a836a4794accab471"} Jun 06 09:32:39 crc kubenswrapper[4911]: I0606 09:32:39.365284 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Jun 06 09:32:40 crc kubenswrapper[4911]: I0606 09:32:40.605600 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.271482 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-v6s7j" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.298777 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-v6s7j" event={"ID":"1528dab4-6c33-4923-8c32-9c2b39aee053","Type":"ContainerDied","Data":"3802ca2aecbf9a3bd6fa37dae7d9407038357e987bb9390db66ad4872aed9fde"} Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.298820 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3802ca2aecbf9a3bd6fa37dae7d9407038357e987bb9390db66ad4872aed9fde" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.298834 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-v6s7j" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.382711 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52j7b\" (UniqueName: \"kubernetes.io/projected/1528dab4-6c33-4923-8c32-9c2b39aee053-kube-api-access-52j7b\") pod \"1528dab4-6c33-4923-8c32-9c2b39aee053\" (UID: \"1528dab4-6c33-4923-8c32-9c2b39aee053\") " Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.390440 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1528dab4-6c33-4923-8c32-9c2b39aee053-kube-api-access-52j7b" (OuterVolumeSpecName: "kube-api-access-52j7b") pod "1528dab4-6c33-4923-8c32-9c2b39aee053" (UID: "1528dab4-6c33-4923-8c32-9c2b39aee053"). InnerVolumeSpecName "kube-api-access-52j7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.486401 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52j7b\" (UniqueName: \"kubernetes.io/projected/1528dab4-6c33-4923-8c32-9c2b39aee053-kube-api-access-52j7b\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.501917 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-56x7b" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.506377 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-g64qx" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.587521 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2z4w\" (UniqueName: \"kubernetes.io/projected/a1bfa29c-daf6-4b0c-89e6-3704863677e3-kube-api-access-l2z4w\") pod \"a1bfa29c-daf6-4b0c-89e6-3704863677e3\" (UID: \"a1bfa29c-daf6-4b0c-89e6-3704863677e3\") " Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.587629 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcx72\" (UniqueName: \"kubernetes.io/projected/b0c460c5-5199-4a41-afb5-58b1bcbe0fa7-kube-api-access-xcx72\") pod \"b0c460c5-5199-4a41-afb5-58b1bcbe0fa7\" (UID: \"b0c460c5-5199-4a41-afb5-58b1bcbe0fa7\") " Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.594439 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1bfa29c-daf6-4b0c-89e6-3704863677e3-kube-api-access-l2z4w" (OuterVolumeSpecName: "kube-api-access-l2z4w") pod "a1bfa29c-daf6-4b0c-89e6-3704863677e3" (UID: "a1bfa29c-daf6-4b0c-89e6-3704863677e3"). InnerVolumeSpecName "kube-api-access-l2z4w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.603393 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0c460c5-5199-4a41-afb5-58b1bcbe0fa7-kube-api-access-xcx72" (OuterVolumeSpecName: "kube-api-access-xcx72") pod "b0c460c5-5199-4a41-afb5-58b1bcbe0fa7" (UID: "b0c460c5-5199-4a41-afb5-58b1bcbe0fa7"). InnerVolumeSpecName "kube-api-access-xcx72". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.682618 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.690806 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2z4w\" (UniqueName: \"kubernetes.io/projected/a1bfa29c-daf6-4b0c-89e6-3704863677e3-kube-api-access-l2z4w\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.690850 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcx72\" (UniqueName: \"kubernetes.io/projected/b0c460c5-5199-4a41-afb5-58b1bcbe0fa7-kube-api-access-xcx72\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.745068 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.917984 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Jun 06 09:32:41 crc kubenswrapper[4911]: I0606 09:32:41.924707 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jun 06 09:32:42 crc kubenswrapper[4911]: I0606 09:32:42.177755 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Jun 06 09:32:42 crc kubenswrapper[4911]: I0606 09:32:42.312016 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-g64qx" event={"ID":"a1bfa29c-daf6-4b0c-89e6-3704863677e3","Type":"ContainerDied","Data":"38f032850f0f91320a43a64ee0305871a3de53151fb1f8880d60249441bf190c"} Jun 06 09:32:42 crc kubenswrapper[4911]: I0606 09:32:42.312071 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38f032850f0f91320a43a64ee0305871a3de53151fb1f8880d60249441bf190c" Jun 06 09:32:42 crc kubenswrapper[4911]: I0606 09:32:42.312123 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-g64qx" Jun 06 09:32:42 crc kubenswrapper[4911]: I0606 09:32:42.315823 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-56x7b" Jun 06 09:32:42 crc kubenswrapper[4911]: I0606 09:32:42.315861 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-56x7b" event={"ID":"b0c460c5-5199-4a41-afb5-58b1bcbe0fa7","Type":"ContainerDied","Data":"9b728612a254f4520b2d3e08a67a0ed785347d3b583f98b1c7de17410e0d9adc"} Jun 06 09:32:42 crc kubenswrapper[4911]: I0606 09:32:42.315957 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b728612a254f4520b2d3e08a67a0ed785347d3b583f98b1c7de17410e0d9adc" Jun 06 09:32:42 crc kubenswrapper[4911]: I0606 09:32:42.315984 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="074afac4-64d1-4af0-a4fc-82e118a23756" containerName="manila-share" containerID="cri-o://f070e07587f58137b8549c14a8280b660a6be808a33526e89e3de3e0774bafeb" gracePeriod=30 Jun 06 09:32:42 crc kubenswrapper[4911]: I0606 09:32:42.316140 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="074afac4-64d1-4af0-a4fc-82e118a23756" containerName="probe" containerID="cri-o://fc7d80efb3e46dd01b815948021265703231304a9a6115bc5e0ef821692a19af" gracePeriod=30 Jun 06 09:32:43 crc kubenswrapper[4911]: I0606 09:32:43.328374 4911 generic.go:334] "Generic (PLEG): container finished" podID="074afac4-64d1-4af0-a4fc-82e118a23756" containerID="fc7d80efb3e46dd01b815948021265703231304a9a6115bc5e0ef821692a19af" exitCode=0 Jun 06 09:32:43 crc kubenswrapper[4911]: I0606 09:32:43.328957 4911 generic.go:334] "Generic (PLEG): container finished" podID="074afac4-64d1-4af0-a4fc-82e118a23756" containerID="f070e07587f58137b8549c14a8280b660a6be808a33526e89e3de3e0774bafeb" exitCode=1 Jun 06 09:32:43 crc kubenswrapper[4911]: I0606 09:32:43.328464 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"074afac4-64d1-4af0-a4fc-82e118a23756","Type":"ContainerDied","Data":"fc7d80efb3e46dd01b815948021265703231304a9a6115bc5e0ef821692a19af"} Jun 06 09:32:43 crc kubenswrapper[4911]: I0606 09:32:43.329004 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"074afac4-64d1-4af0-a4fc-82e118a23756","Type":"ContainerDied","Data":"f070e07587f58137b8549c14a8280b660a6be808a33526e89e3de3e0774bafeb"} Jun 06 09:32:43 crc kubenswrapper[4911]: I0606 09:32:43.851157 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.023938 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data-custom\") pod \"074afac4-64d1-4af0-a4fc-82e118a23756\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.024042 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data\") pod \"074afac4-64d1-4af0-a4fc-82e118a23756\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.024148 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-machine-id\") pod \"074afac4-64d1-4af0-a4fc-82e118a23756\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.024204 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-combined-ca-bundle\") pod \"074afac4-64d1-4af0-a4fc-82e118a23756\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.024221 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "074afac4-64d1-4af0-a4fc-82e118a23756" (UID: "074afac4-64d1-4af0-a4fc-82e118a23756"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.024242 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-ceph\") pod \"074afac4-64d1-4af0-a4fc-82e118a23756\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.024389 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-localtime\") pod \"074afac4-64d1-4af0-a4fc-82e118a23756\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.024437 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-var-lib-manila\") pod \"074afac4-64d1-4af0-a4fc-82e118a23756\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.024478 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-689tk\" (UniqueName: \"kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-kube-api-access-689tk\") pod \"074afac4-64d1-4af0-a4fc-82e118a23756\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.024512 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-scripts\") pod \"074afac4-64d1-4af0-a4fc-82e118a23756\" (UID: \"074afac4-64d1-4af0-a4fc-82e118a23756\") " Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.024580 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-localtime" (OuterVolumeSpecName: "etc-localtime") pod "074afac4-64d1-4af0-a4fc-82e118a23756" (UID: "074afac4-64d1-4af0-a4fc-82e118a23756"). InnerVolumeSpecName "etc-localtime". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.024581 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "074afac4-64d1-4af0-a4fc-82e118a23756" (UID: "074afac4-64d1-4af0-a4fc-82e118a23756"). InnerVolumeSpecName "var-lib-manila". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.025930 4911 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.025960 4911 reconciler_common.go:293] "Volume detached for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-etc-localtime\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.025972 4911 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/074afac4-64d1-4af0-a4fc-82e118a23756-var-lib-manila\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.030895 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-kube-api-access-689tk" (OuterVolumeSpecName: "kube-api-access-689tk") pod "074afac4-64d1-4af0-a4fc-82e118a23756" (UID: "074afac4-64d1-4af0-a4fc-82e118a23756"). InnerVolumeSpecName "kube-api-access-689tk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.030945 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-ceph" (OuterVolumeSpecName: "ceph") pod "074afac4-64d1-4af0-a4fc-82e118a23756" (UID: "074afac4-64d1-4af0-a4fc-82e118a23756"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.048280 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "074afac4-64d1-4af0-a4fc-82e118a23756" (UID: "074afac4-64d1-4af0-a4fc-82e118a23756"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.049206 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-scripts" (OuterVolumeSpecName: "scripts") pod "074afac4-64d1-4af0-a4fc-82e118a23756" (UID: "074afac4-64d1-4af0-a4fc-82e118a23756"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.099424 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "074afac4-64d1-4af0-a4fc-82e118a23756" (UID: "074afac4-64d1-4af0-a4fc-82e118a23756"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.127762 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.127806 4911 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-ceph\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.127820 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-689tk\" (UniqueName: \"kubernetes.io/projected/074afac4-64d1-4af0-a4fc-82e118a23756-kube-api-access-689tk\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.127831 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.127843 4911 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data-custom\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.153652 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data" (OuterVolumeSpecName: "config-data") pod "074afac4-64d1-4af0-a4fc-82e118a23756" (UID: "074afac4-64d1-4af0-a4fc-82e118a23756"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.230821 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074afac4-64d1-4af0-a4fc-82e118a23756-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.341323 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"074afac4-64d1-4af0-a4fc-82e118a23756","Type":"ContainerDied","Data":"d9d567b5cd0c5deb2e1d0cbdad1d649ca45f3548268d09a6edfb3979acbf38b9"} Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.341387 4911 scope.go:117] "RemoveContainer" containerID="fc7d80efb3e46dd01b815948021265703231304a9a6115bc5e0ef821692a19af" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.341424 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.370054 4911 scope.go:117] "RemoveContainer" containerID="f070e07587f58137b8549c14a8280b660a6be808a33526e89e3de3e0774bafeb" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.377149 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.390390 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-share-share1-0"] Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.400963 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Jun 06 09:32:44 crc kubenswrapper[4911]: E0606 09:32:44.401480 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="074afac4-64d1-4af0-a4fc-82e118a23756" containerName="manila-share" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.401508 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="074afac4-64d1-4af0-a4fc-82e118a23756" containerName="manila-share" Jun 06 09:32:44 crc kubenswrapper[4911]: E0606 09:32:44.401529 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="074afac4-64d1-4af0-a4fc-82e118a23756" containerName="probe" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.401538 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="074afac4-64d1-4af0-a4fc-82e118a23756" containerName="probe" Jun 06 09:32:44 crc kubenswrapper[4911]: E0606 09:32:44.401563 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1bfa29c-daf6-4b0c-89e6-3704863677e3" containerName="mariadb-database-create" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.401571 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1bfa29c-daf6-4b0c-89e6-3704863677e3" containerName="mariadb-database-create" Jun 06 09:32:44 crc kubenswrapper[4911]: E0606 09:32:44.401600 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1528dab4-6c33-4923-8c32-9c2b39aee053" containerName="mariadb-database-create" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.401607 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1528dab4-6c33-4923-8c32-9c2b39aee053" containerName="mariadb-database-create" Jun 06 09:32:44 crc kubenswrapper[4911]: E0606 09:32:44.401619 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0c460c5-5199-4a41-afb5-58b1bcbe0fa7" containerName="mariadb-database-create" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.401628 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0c460c5-5199-4a41-afb5-58b1bcbe0fa7" containerName="mariadb-database-create" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.401832 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1bfa29c-daf6-4b0c-89e6-3704863677e3" containerName="mariadb-database-create" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.401846 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="074afac4-64d1-4af0-a4fc-82e118a23756" containerName="manila-share" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.401865 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0c460c5-5199-4a41-afb5-58b1bcbe0fa7" containerName="mariadb-database-create" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.401883 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="074afac4-64d1-4af0-a4fc-82e118a23756" containerName="probe" Jun 06 09:32:44 crc 
kubenswrapper[4911]: I0606 09:32:44.401898 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1528dab4-6c33-4923-8c32-9c2b39aee053" containerName="mariadb-database-create" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.403086 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.406650 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.416136 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.536005 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/80c180ce-4fd8-40f4-b0bd-aa247612baa6-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.536114 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80c180ce-4fd8-40f4-b0bd-aa247612baa6-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.536491 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-scripts\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.536649 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/80c180ce-4fd8-40f4-b0bd-aa247612baa6-etc-localtime\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.536720 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.536755 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.536826 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-config-data\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.536952 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p748d\" (UniqueName: \"kubernetes.io/projected/80c180ce-4fd8-40f4-b0bd-aa247612baa6-kube-api-access-p748d\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.537044 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/80c180ce-4fd8-40f4-b0bd-aa247612baa6-ceph\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.639411 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p748d\" (UniqueName: \"kubernetes.io/projected/80c180ce-4fd8-40f4-b0bd-aa247612baa6-kube-api-access-p748d\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.639502 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/80c180ce-4fd8-40f4-b0bd-aa247612baa6-ceph\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.639530 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/80c180ce-4fd8-40f4-b0bd-aa247612baa6-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.639577 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80c180ce-4fd8-40f4-b0bd-aa247612baa6-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.639670 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-scripts\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.639729 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/80c180ce-4fd8-40f4-b0bd-aa247612baa6-etc-localtime\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.639752 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.639768 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-config-data-custom\") 
pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.639784 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-config-data\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.639806 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80c180ce-4fd8-40f4-b0bd-aa247612baa6-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.639904 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/80c180ce-4fd8-40f4-b0bd-aa247612baa6-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.640462 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-localtime\" (UniqueName: \"kubernetes.io/host-path/80c180ce-4fd8-40f4-b0bd-aa247612baa6-etc-localtime\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.645425 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-scripts\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.645919 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.646184 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/80c180ce-4fd8-40f4-b0bd-aa247612baa6-ceph\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.646560 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.650041 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80c180ce-4fd8-40f4-b0bd-aa247612baa6-config-data\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.659913 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p748d\" 
(UniqueName: \"kubernetes.io/projected/80c180ce-4fd8-40f4-b0bd-aa247612baa6-kube-api-access-p748d\") pod \"manila-share-share1-0\" (UID: \"80c180ce-4fd8-40f4-b0bd-aa247612baa6\") " pod="openstack/manila-share-share1-0" Jun 06 09:32:44 crc kubenswrapper[4911]: I0606 09:32:44.732847 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Jun 06 09:32:45 crc kubenswrapper[4911]: I0606 09:32:45.401457 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Jun 06 09:32:45 crc kubenswrapper[4911]: I0606 09:32:45.679815 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Jun 06 09:32:45 crc kubenswrapper[4911]: I0606 09:32:45.869935 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Jun 06 09:32:45 crc kubenswrapper[4911]: I0606 09:32:45.962317 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="074afac4-64d1-4af0-a4fc-82e118a23756" path="/var/lib/kubelet/pods/074afac4-64d1-4af0-a4fc-82e118a23756/volumes" Jun 06 09:32:46 crc kubenswrapper[4911]: I0606 09:32:46.384465 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"80c180ce-4fd8-40f4-b0bd-aa247612baa6","Type":"ContainerStarted","Data":"7af80109e66a36d8d60034d5f589790e409ec185392b55bd2e2d1fafe7587109"} Jun 06 09:32:46 crc kubenswrapper[4911]: I0606 09:32:46.384824 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"80c180ce-4fd8-40f4-b0bd-aa247612baa6","Type":"ContainerStarted","Data":"5ce8b84ff086b266734065a3482bb93bd0079f03a53aa2d5a787d4efabfa72c2"} Jun 06 09:32:46 crc kubenswrapper[4911]: I0606 09:32:46.384836 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"80c180ce-4fd8-40f4-b0bd-aa247612baa6","Type":"ContainerStarted","Data":"839f6b67e4ac02a1d0d0abaeac56b4d334ab9b4bd042a1b43cb110e352cb30a1"} Jun 06 09:32:46 crc kubenswrapper[4911]: I0606 09:32:46.418477 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=2.418458768 podStartE2EDuration="2.418458768s" podCreationTimestamp="2025-06-06 09:32:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:46.411011087 +0000 UTC m=+1177.686436650" watchObservedRunningTime="2025-06-06 09:32:46.418458768 +0000 UTC m=+1177.693884311" Jun 06 09:32:47 crc kubenswrapper[4911]: I0606 09:32:47.728809 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:32:47 crc kubenswrapper[4911]: I0606 09:32:47.729803 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerName="glance-httpd" containerID="cri-o://3730324e7d934b64ed51c4247ef3da62ca46bc5515eb96961c232db1c2bc591a" gracePeriod=30 Jun 06 09:32:47 crc kubenswrapper[4911]: I0606 09:32:47.729652 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerName="glance-log" containerID="cri-o://cb43c126ab9d2f48c1da518fc8c59546e84cca87926a978fcf680190b45a07c2" gracePeriod=30 Jun 06 09:32:48 crc 
kubenswrapper[4911]: I0606 09:32:48.426474 4911 generic.go:334] "Generic (PLEG): container finished" podID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerID="cb43c126ab9d2f48c1da518fc8c59546e84cca87926a978fcf680190b45a07c2" exitCode=143 Jun 06 09:32:48 crc kubenswrapper[4911]: I0606 09:32:48.426607 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"39f3ac21-297f-4acd-b430-cf0dcd3d56ff","Type":"ContainerDied","Data":"cb43c126ab9d2f48c1da518fc8c59546e84cca87926a978fcf680190b45a07c2"} Jun 06 09:32:50 crc kubenswrapper[4911]: I0606 09:32:50.898824 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.143:9292/healthcheck\": dial tcp 10.217.0.143:9292: connect: connection refused" Jun 06 09:32:50 crc kubenswrapper[4911]: I0606 09:32:50.899048 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.143:9292/healthcheck\": dial tcp 10.217.0.143:9292: connect: connection refused" Jun 06 09:32:51 crc kubenswrapper[4911]: I0606 09:32:51.292757 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:32:51 crc kubenswrapper[4911]: I0606 09:32:51.293305 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6909d9d4-e754-4ac5-8f77-e20e48f96714" containerName="glance-log" containerID="cri-o://1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42" gracePeriod=30 Jun 06 09:32:51 crc kubenswrapper[4911]: I0606 09:32:51.293565 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6909d9d4-e754-4ac5-8f77-e20e48f96714" containerName="glance-httpd" containerID="cri-o://38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202" gracePeriod=30 Jun 06 09:32:51 crc kubenswrapper[4911]: I0606 09:32:51.475853 4911 generic.go:334] "Generic (PLEG): container finished" podID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerID="3730324e7d934b64ed51c4247ef3da62ca46bc5515eb96961c232db1c2bc591a" exitCode=0 Jun 06 09:32:51 crc kubenswrapper[4911]: I0606 09:32:51.476356 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"39f3ac21-297f-4acd-b430-cf0dcd3d56ff","Type":"ContainerDied","Data":"3730324e7d934b64ed51c4247ef3da62ca46bc5515eb96961c232db1c2bc591a"} Jun 06 09:32:51 crc kubenswrapper[4911]: I0606 09:32:51.482021 4911 generic.go:334] "Generic (PLEG): container finished" podID="6909d9d4-e754-4ac5-8f77-e20e48f96714" containerID="1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42" exitCode=143 Jun 06 09:32:51 crc kubenswrapper[4911]: I0606 09:32:51.482740 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6909d9d4-e754-4ac5-8f77-e20e48f96714","Type":"ContainerDied","Data":"1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42"} Jun 06 09:32:51 crc kubenswrapper[4911]: I0606 09:32:51.875250 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.020411 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8c66\" (UniqueName: \"kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-kube-api-access-m8c66\") pod \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.020498 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-httpd-run\") pod \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.020654 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.020696 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-public-tls-certs\") pod \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.020735 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-combined-ca-bundle\") pod \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.020808 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-config-data\") pod \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.020830 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-ceph\") pod \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.020889 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-scripts\") pod \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.020927 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-logs\") pod \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\" (UID: \"39f3ac21-297f-4acd-b430-cf0dcd3d56ff\") " Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.021573 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-logs" (OuterVolumeSpecName: "logs") pod "39f3ac21-297f-4acd-b430-cf0dcd3d56ff" (UID: "39f3ac21-297f-4acd-b430-cf0dcd3d56ff"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.022068 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "39f3ac21-297f-4acd-b430-cf0dcd3d56ff" (UID: "39f3ac21-297f-4acd-b430-cf0dcd3d56ff"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.027172 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "39f3ac21-297f-4acd-b430-cf0dcd3d56ff" (UID: "39f3ac21-297f-4acd-b430-cf0dcd3d56ff"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.029590 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-kube-api-access-m8c66" (OuterVolumeSpecName: "kube-api-access-m8c66") pod "39f3ac21-297f-4acd-b430-cf0dcd3d56ff" (UID: "39f3ac21-297f-4acd-b430-cf0dcd3d56ff"). InnerVolumeSpecName "kube-api-access-m8c66". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.033106 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-ceph" (OuterVolumeSpecName: "ceph") pod "39f3ac21-297f-4acd-b430-cf0dcd3d56ff" (UID: "39f3ac21-297f-4acd-b430-cf0dcd3d56ff"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.033166 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-scripts" (OuterVolumeSpecName: "scripts") pod "39f3ac21-297f-4acd-b430-cf0dcd3d56ff" (UID: "39f3ac21-297f-4acd-b430-cf0dcd3d56ff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.055702 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39f3ac21-297f-4acd-b430-cf0dcd3d56ff" (UID: "39f3ac21-297f-4acd-b430-cf0dcd3d56ff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.086689 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "39f3ac21-297f-4acd-b430-cf0dcd3d56ff" (UID: "39f3ac21-297f-4acd-b430-cf0dcd3d56ff"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.097567 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-config-data" (OuterVolumeSpecName: "config-data") pod "39f3ac21-297f-4acd-b430-cf0dcd3d56ff" (UID: "39f3ac21-297f-4acd-b430-cf0dcd3d56ff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.123794 4911 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-httpd-run\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.123867 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.123880 4911 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.123896 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.123909 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.123920 4911 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-ceph\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.123930 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.123941 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.123954 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8c66\" (UniqueName: \"kubernetes.io/projected/39f3ac21-297f-4acd-b430-cf0dcd3d56ff-kube-api-access-m8c66\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.155643 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.225740 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.495558 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"39f3ac21-297f-4acd-b430-cf0dcd3d56ff","Type":"ContainerDied","Data":"7011273400e464fffcd6b38a48a9199e57fe908da7414392c0506e42e219aa97"} Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.495631 4911 scope.go:117] "RemoveContainer" containerID="3730324e7d934b64ed51c4247ef3da62ca46bc5515eb96961c232db1c2bc591a" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.495766 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.536523 4911 scope.go:117] "RemoveContainer" containerID="cb43c126ab9d2f48c1da518fc8c59546e84cca87926a978fcf680190b45a07c2" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.540710 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.557039 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.574054 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:32:52 crc kubenswrapper[4911]: E0606 09:32:52.574589 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerName="glance-log" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.574607 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerName="glance-log" Jun 06 09:32:52 crc kubenswrapper[4911]: E0606 09:32:52.574621 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerName="glance-httpd" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.574628 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerName="glance-httpd" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.574885 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerName="glance-httpd" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.574905 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" containerName="glance-log" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.577409 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.581760 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.582059 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.585048 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.735875 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63f644ac-2580-4e7b-a723-f9787e2aacad-logs\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.736082 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/63f644ac-2580-4e7b-a723-f9787e2aacad-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.736215 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-config-data\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.736245 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.736327 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.736424 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.736499 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/63f644ac-2580-4e7b-a723-f9787e2aacad-ceph\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.736529 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-scripts\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.736569 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7z6k\" (UniqueName: \"kubernetes.io/projected/63f644ac-2580-4e7b-a723-f9787e2aacad-kube-api-access-r7z6k\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.851412 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/63f644ac-2580-4e7b-a723-f9787e2aacad-ceph\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.851759 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-scripts\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.851802 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7z6k\" (UniqueName: \"kubernetes.io/projected/63f644ac-2580-4e7b-a723-f9787e2aacad-kube-api-access-r7z6k\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.851921 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63f644ac-2580-4e7b-a723-f9787e2aacad-logs\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.851982 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/63f644ac-2580-4e7b-a723-f9787e2aacad-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.852033 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-config-data\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.852057 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.852084 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.852131 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.852386 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.852925 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63f644ac-2580-4e7b-a723-f9787e2aacad-logs\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.853238 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/63f644ac-2580-4e7b-a723-f9787e2aacad-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.857997 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-scripts\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.858725 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/63f644ac-2580-4e7b-a723-f9787e2aacad-ceph\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.859515 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-config-data\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.860964 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.868835 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/63f644ac-2580-4e7b-a723-f9787e2aacad-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " 
pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.881733 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7z6k\" (UniqueName: \"kubernetes.io/projected/63f644ac-2580-4e7b-a723-f9787e2aacad-kube-api-access-r7z6k\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.891883 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"63f644ac-2580-4e7b-a723-f9787e2aacad\") " pod="openstack/glance-default-external-api-0" Jun 06 09:32:52 crc kubenswrapper[4911]: I0606 09:32:52.899780 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jun 06 09:32:53 crc kubenswrapper[4911]: W0606 09:32:53.520103 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63f644ac_2580_4e7b_a723_f9787e2aacad.slice/crio-c2828fb3852b559153b507cf4acbfd6f6595d58ee67186c0af048c4353d3ae45 WatchSource:0}: Error finding container c2828fb3852b559153b507cf4acbfd6f6595d58ee67186c0af048c4353d3ae45: Status 404 returned error can't find the container with id c2828fb3852b559153b507cf4acbfd6f6595d58ee67186c0af048c4353d3ae45 Jun 06 09:32:53 crc kubenswrapper[4911]: I0606 09:32:53.528121 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jun 06 09:32:53 crc kubenswrapper[4911]: I0606 09:32:53.942491 4911 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod67d3c09a-3215-4dfe-8838-d621e317b13e"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod67d3c09a-3215-4dfe-8838-d621e317b13e] : Timed out while waiting for systemd to remove kubepods-besteffort-pod67d3c09a_3215_4dfe_8838_d621e317b13e.slice" Jun 06 09:32:53 crc kubenswrapper[4911]: E0606 09:32:53.943046 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod67d3c09a-3215-4dfe-8838-d621e317b13e] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod67d3c09a-3215-4dfe-8838-d621e317b13e] : Timed out while waiting for systemd to remove kubepods-besteffort-pod67d3c09a_3215_4dfe_8838_d621e317b13e.slice" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" podUID="67d3c09a-3215-4dfe-8838-d621e317b13e" Jun 06 09:32:53 crc kubenswrapper[4911]: I0606 09:32:53.963433 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39f3ac21-297f-4acd-b430-cf0dcd3d56ff" path="/var/lib/kubelet/pods/39f3ac21-297f-4acd-b430-cf0dcd3d56ff/volumes" Jun 06 09:32:54 crc kubenswrapper[4911]: I0606 09:32:54.528958 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"63f644ac-2580-4e7b-a723-f9787e2aacad","Type":"ContainerStarted","Data":"320fc053b9b0f7877d497d34273de9dc558a5d2c8fffea39ad6af4368e18bde1"} Jun 06 09:32:54 crc kubenswrapper[4911]: I0606 09:32:54.529305 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"63f644ac-2580-4e7b-a723-f9787e2aacad","Type":"ContainerStarted","Data":"c2828fb3852b559153b507cf4acbfd6f6595d58ee67186c0af048c4353d3ae45"} Jun 06 
09:32:54 crc kubenswrapper[4911]: I0606 09:32:54.528994 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57f4d4886c-m5nx6" Jun 06 09:32:54 crc kubenswrapper[4911]: I0606 09:32:54.589241 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57f4d4886c-m5nx6"] Jun 06 09:32:54 crc kubenswrapper[4911]: I0606 09:32:54.597868 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57f4d4886c-m5nx6"] Jun 06 09:32:54 crc kubenswrapper[4911]: I0606 09:32:54.733566 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.288238 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.467721 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.541741 4911 generic.go:334] "Generic (PLEG): container finished" podID="6909d9d4-e754-4ac5-8f77-e20e48f96714" containerID="38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202" exitCode=0 Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.541846 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.541871 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6909d9d4-e754-4ac5-8f77-e20e48f96714","Type":"ContainerDied","Data":"38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202"} Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.541971 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6909d9d4-e754-4ac5-8f77-e20e48f96714","Type":"ContainerDied","Data":"34328d87ddfacd3970195098329af376246635c22c71f53b76bb2942470d94aa"} Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.542004 4911 scope.go:117] "RemoveContainer" containerID="38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.546202 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"63f644ac-2580-4e7b-a723-f9787e2aacad","Type":"ContainerStarted","Data":"2026d4c8ebef46ec832776f6f49892dc66c1dbc509f757248eb48614c1a93e7a"} Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.571597 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.571574446 podStartE2EDuration="3.571574446s" podCreationTimestamp="2025-06-06 09:32:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:55.564577145 +0000 UTC m=+1186.840002698" watchObservedRunningTime="2025-06-06 09:32:55.571574446 +0000 UTC m=+1186.846999989" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.588896 4911 scope.go:117] "RemoveContainer" containerID="1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.612224 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-config-data\") pod \"6909d9d4-e754-4ac5-8f77-e20e48f96714\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.613403 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-logs\") pod \"6909d9d4-e754-4ac5-8f77-e20e48f96714\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.613466 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-ceph\") pod \"6909d9d4-e754-4ac5-8f77-e20e48f96714\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.613533 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-internal-tls-certs\") pod \"6909d9d4-e754-4ac5-8f77-e20e48f96714\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.613557 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-scripts\") pod \"6909d9d4-e754-4ac5-8f77-e20e48f96714\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.613584 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-httpd-run\") pod \"6909d9d4-e754-4ac5-8f77-e20e48f96714\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.613639 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twdmt\" (UniqueName: \"kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-kube-api-access-twdmt\") pod \"6909d9d4-e754-4ac5-8f77-e20e48f96714\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.613655 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-combined-ca-bundle\") pod \"6909d9d4-e754-4ac5-8f77-e20e48f96714\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.613818 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"6909d9d4-e754-4ac5-8f77-e20e48f96714\" (UID: \"6909d9d4-e754-4ac5-8f77-e20e48f96714\") " Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.614082 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-logs" (OuterVolumeSpecName: "logs") pod "6909d9d4-e754-4ac5-8f77-e20e48f96714" (UID: "6909d9d4-e754-4ac5-8f77-e20e48f96714"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.614616 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.615010 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6909d9d4-e754-4ac5-8f77-e20e48f96714" (UID: "6909d9d4-e754-4ac5-8f77-e20e48f96714"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.620140 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "6909d9d4-e754-4ac5-8f77-e20e48f96714" (UID: "6909d9d4-e754-4ac5-8f77-e20e48f96714"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.621856 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-ceph" (OuterVolumeSpecName: "ceph") pod "6909d9d4-e754-4ac5-8f77-e20e48f96714" (UID: "6909d9d4-e754-4ac5-8f77-e20e48f96714"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.622286 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-kube-api-access-twdmt" (OuterVolumeSpecName: "kube-api-access-twdmt") pod "6909d9d4-e754-4ac5-8f77-e20e48f96714" (UID: "6909d9d4-e754-4ac5-8f77-e20e48f96714"). InnerVolumeSpecName "kube-api-access-twdmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.622431 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-scripts" (OuterVolumeSpecName: "scripts") pod "6909d9d4-e754-4ac5-8f77-e20e48f96714" (UID: "6909d9d4-e754-4ac5-8f77-e20e48f96714"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.627742 4911 scope.go:117] "RemoveContainer" containerID="38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202" Jun 06 09:32:55 crc kubenswrapper[4911]: E0606 09:32:55.628281 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202\": container with ID starting with 38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202 not found: ID does not exist" containerID="38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.628324 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202"} err="failed to get container status \"38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202\": rpc error: code = NotFound desc = could not find container \"38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202\": container with ID starting with 38ca5436ca831a66ac48e63984b977712d5cbbe15a1a8812f9a01ee597b75202 not found: ID does not exist" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.628353 4911 scope.go:117] "RemoveContainer" containerID="1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42" Jun 06 09:32:55 crc kubenswrapper[4911]: E0606 09:32:55.628818 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42\": container with ID starting with 1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42 not found: ID does not exist" containerID="1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.628845 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42"} err="failed to get container status \"1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42\": rpc error: code = NotFound desc = could not find container \"1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42\": container with ID starting with 1318bfb519240113120dde9f7dc30f6f9e9321352ab612579c0343fd7d59df42 not found: ID does not exist" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.651935 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6909d9d4-e754-4ac5-8f77-e20e48f96714" (UID: "6909d9d4-e754-4ac5-8f77-e20e48f96714"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.673289 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-config-data" (OuterVolumeSpecName: "config-data") pod "6909d9d4-e754-4ac5-8f77-e20e48f96714" (UID: "6909d9d4-e754-4ac5-8f77-e20e48f96714"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.690732 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6909d9d4-e754-4ac5-8f77-e20e48f96714" (UID: "6909d9d4-e754-4ac5-8f77-e20e48f96714"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.716832 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.716873 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.716893 4911 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-ceph\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.716903 4911 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.716914 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.716922 4911 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6909d9d4-e754-4ac5-8f77-e20e48f96714-httpd-run\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.716930 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twdmt\" (UniqueName: \"kubernetes.io/projected/6909d9d4-e754-4ac5-8f77-e20e48f96714-kube-api-access-twdmt\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.716939 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6909d9d4-e754-4ac5-8f77-e20e48f96714-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.740606 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.818526 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.876120 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.886328 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.900215 4911 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/glance-default-internal-api-0"] Jun 06 09:32:55 crc kubenswrapper[4911]: E0606 09:32:55.900620 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6909d9d4-e754-4ac5-8f77-e20e48f96714" containerName="glance-httpd" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.900637 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6909d9d4-e754-4ac5-8f77-e20e48f96714" containerName="glance-httpd" Jun 06 09:32:55 crc kubenswrapper[4911]: E0606 09:32:55.900678 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6909d9d4-e754-4ac5-8f77-e20e48f96714" containerName="glance-log" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.900685 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6909d9d4-e754-4ac5-8f77-e20e48f96714" containerName="glance-log" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.900912 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6909d9d4-e754-4ac5-8f77-e20e48f96714" containerName="glance-log" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.900954 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6909d9d4-e754-4ac5-8f77-e20e48f96714" containerName="glance-httpd" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.902479 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.906125 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.906195 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.912628 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.983260 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67d3c09a-3215-4dfe-8838-d621e317b13e" path="/var/lib/kubelet/pods/67d3c09a-3215-4dfe-8838-d621e317b13e/volumes" Jun 06 09:32:55 crc kubenswrapper[4911]: I0606 09:32:55.986844 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6909d9d4-e754-4ac5-8f77-e20e48f96714" path="/var/lib/kubelet/pods/6909d9d4-e754-4ac5-8f77-e20e48f96714/volumes" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.023868 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/37d93350-7e25-445d-97e7-0095ebd1d997-ceph\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.023988 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.024019 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37d93350-7e25-445d-97e7-0095ebd1d997-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.024040 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-config-data\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.024143 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npdnl\" (UniqueName: \"kubernetes.io/projected/37d93350-7e25-445d-97e7-0095ebd1d997-kube-api-access-npdnl\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.024202 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.024247 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37d93350-7e25-445d-97e7-0095ebd1d997-logs\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.024269 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.024466 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.126370 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.126422 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37d93350-7e25-445d-97e7-0095ebd1d997-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.126444 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.126498 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npdnl\" (UniqueName: \"kubernetes.io/projected/37d93350-7e25-445d-97e7-0095ebd1d997-kube-api-access-npdnl\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.126547 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.126581 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37d93350-7e25-445d-97e7-0095ebd1d997-logs\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.126602 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.126687 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.126730 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/37d93350-7e25-445d-97e7-0095ebd1d997-ceph\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.127502 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37d93350-7e25-445d-97e7-0095ebd1d997-logs\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.127542 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.127682 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37d93350-7e25-445d-97e7-0095ebd1d997-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " 
pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.132547 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.132687 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-config-data\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.132823 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/37d93350-7e25-445d-97e7-0095ebd1d997-ceph\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.139960 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.141384 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37d93350-7e25-445d-97e7-0095ebd1d997-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.156021 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npdnl\" (UniqueName: \"kubernetes.io/projected/37d93350-7e25-445d-97e7-0095ebd1d997-kube-api-access-npdnl\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.159302 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"37d93350-7e25-445d-97e7-0095ebd1d997\") " pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.223609 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jun 06 09:32:56 crc kubenswrapper[4911]: W0606 09:32:56.759356 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37d93350_7e25_445d_97e7_0095ebd1d997.slice/crio-4b2a1cbef133a997b5863abbc1f33d61b958943e3c90d31a7899fa2fc704213b WatchSource:0}: Error finding container 4b2a1cbef133a997b5863abbc1f33d61b958943e3c90d31a7899fa2fc704213b: Status 404 returned error can't find the container with id 4b2a1cbef133a997b5863abbc1f33d61b958943e3c90d31a7899fa2fc704213b Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.768748 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.895970 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-99ed-account-create-wm9cl"] Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.897504 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-99ed-account-create-wm9cl" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.899431 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.908288 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-99ed-account-create-wm9cl"] Jun 06 09:32:56 crc kubenswrapper[4911]: I0606 09:32:56.942055 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r747z\" (UniqueName: \"kubernetes.io/projected/511fdc0d-3107-49da-aee6-58fcb3071264-kube-api-access-r747z\") pod \"nova-api-99ed-account-create-wm9cl\" (UID: \"511fdc0d-3107-49da-aee6-58fcb3071264\") " pod="openstack/nova-api-99ed-account-create-wm9cl" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.044301 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r747z\" (UniqueName: \"kubernetes.io/projected/511fdc0d-3107-49da-aee6-58fcb3071264-kube-api-access-r747z\") pod \"nova-api-99ed-account-create-wm9cl\" (UID: \"511fdc0d-3107-49da-aee6-58fcb3071264\") " pod="openstack/nova-api-99ed-account-create-wm9cl" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.064797 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r747z\" (UniqueName: \"kubernetes.io/projected/511fdc0d-3107-49da-aee6-58fcb3071264-kube-api-access-r747z\") pod \"nova-api-99ed-account-create-wm9cl\" (UID: \"511fdc0d-3107-49da-aee6-58fcb3071264\") " pod="openstack/nova-api-99ed-account-create-wm9cl" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.094656 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-e109-account-create-tl6lr"] Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.096087 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-e109-account-create-tl6lr" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.098158 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.110344 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e109-account-create-tl6lr"] Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.146980 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7zz4\" (UniqueName: \"kubernetes.io/projected/234e8605-be3a-44ec-8888-be1ce11be223-kube-api-access-j7zz4\") pod \"nova-cell0-e109-account-create-tl6lr\" (UID: \"234e8605-be3a-44ec-8888-be1ce11be223\") " pod="openstack/nova-cell0-e109-account-create-tl6lr" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.229869 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-99ed-account-create-wm9cl" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.249021 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7zz4\" (UniqueName: \"kubernetes.io/projected/234e8605-be3a-44ec-8888-be1ce11be223-kube-api-access-j7zz4\") pod \"nova-cell0-e109-account-create-tl6lr\" (UID: \"234e8605-be3a-44ec-8888-be1ce11be223\") " pod="openstack/nova-cell0-e109-account-create-tl6lr" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.274920 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7zz4\" (UniqueName: \"kubernetes.io/projected/234e8605-be3a-44ec-8888-be1ce11be223-kube-api-access-j7zz4\") pod \"nova-cell0-e109-account-create-tl6lr\" (UID: \"234e8605-be3a-44ec-8888-be1ce11be223\") " pod="openstack/nova-cell0-e109-account-create-tl6lr" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.289921 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-14a8-account-create-74hbj"] Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.291662 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-14a8-account-create-74hbj" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.294707 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.299496 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-14a8-account-create-74hbj"] Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.351412 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvgtn\" (UniqueName: \"kubernetes.io/projected/11097c32-bd99-4810-b897-7edf75c5e2cb-kube-api-access-nvgtn\") pod \"nova-cell1-14a8-account-create-74hbj\" (UID: \"11097c32-bd99-4810-b897-7edf75c5e2cb\") " pod="openstack/nova-cell1-14a8-account-create-74hbj" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.449350 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-e109-account-create-tl6lr" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.453390 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvgtn\" (UniqueName: \"kubernetes.io/projected/11097c32-bd99-4810-b897-7edf75c5e2cb-kube-api-access-nvgtn\") pod \"nova-cell1-14a8-account-create-74hbj\" (UID: \"11097c32-bd99-4810-b897-7edf75c5e2cb\") " pod="openstack/nova-cell1-14a8-account-create-74hbj" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.477760 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvgtn\" (UniqueName: \"kubernetes.io/projected/11097c32-bd99-4810-b897-7edf75c5e2cb-kube-api-access-nvgtn\") pod \"nova-cell1-14a8-account-create-74hbj\" (UID: \"11097c32-bd99-4810-b897-7edf75c5e2cb\") " pod="openstack/nova-cell1-14a8-account-create-74hbj" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.572217 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37d93350-7e25-445d-97e7-0095ebd1d997","Type":"ContainerStarted","Data":"4b2a1cbef133a997b5863abbc1f33d61b958943e3c90d31a7899fa2fc704213b"} Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.694366 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-14a8-account-create-74hbj" Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.761606 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-99ed-account-create-wm9cl"] Jun 06 09:32:57 crc kubenswrapper[4911]: I0606 09:32:57.831685 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Jun 06 09:32:58 crc kubenswrapper[4911]: I0606 09:32:58.310392 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e109-account-create-tl6lr"] Jun 06 09:32:58 crc kubenswrapper[4911]: W0606 09:32:58.317282 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod234e8605_be3a_44ec_8888_be1ce11be223.slice/crio-6a1b3f7ae33dd96f5f3fd4c4ce2095a045868168c034fb890db71fbc391c56d0 WatchSource:0}: Error finding container 6a1b3f7ae33dd96f5f3fd4c4ce2095a045868168c034fb890db71fbc391c56d0: Status 404 returned error can't find the container with id 6a1b3f7ae33dd96f5f3fd4c4ce2095a045868168c034fb890db71fbc391c56d0 Jun 06 09:32:58 crc kubenswrapper[4911]: I0606 09:32:58.536444 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-14a8-account-create-74hbj"] Jun 06 09:32:58 crc kubenswrapper[4911]: I0606 09:32:58.598791 4911 generic.go:334] "Generic (PLEG): container finished" podID="511fdc0d-3107-49da-aee6-58fcb3071264" containerID="1a5db47afbc0c1b7bc5be0030cc80b38ef2fae18ffb544710c409d65988c99a7" exitCode=0 Jun 06 09:32:58 crc kubenswrapper[4911]: I0606 09:32:58.598914 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-99ed-account-create-wm9cl" event={"ID":"511fdc0d-3107-49da-aee6-58fcb3071264","Type":"ContainerDied","Data":"1a5db47afbc0c1b7bc5be0030cc80b38ef2fae18ffb544710c409d65988c99a7"} Jun 06 09:32:58 crc kubenswrapper[4911]: I0606 09:32:58.599457 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-99ed-account-create-wm9cl" event={"ID":"511fdc0d-3107-49da-aee6-58fcb3071264","Type":"ContainerStarted","Data":"72140bb43204f934a73d8c08f6052e5ed803df993ab30597f62a3982ebe6d97f"} Jun 06 09:32:58 
crc kubenswrapper[4911]: I0606 09:32:58.602017 4911 generic.go:334] "Generic (PLEG): container finished" podID="234e8605-be3a-44ec-8888-be1ce11be223" containerID="8739719d3d618f597405794249f3e87f976150ea7793e261e470d28b69c53c6f" exitCode=0 Jun 06 09:32:58 crc kubenswrapper[4911]: I0606 09:32:58.602142 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e109-account-create-tl6lr" event={"ID":"234e8605-be3a-44ec-8888-be1ce11be223","Type":"ContainerDied","Data":"8739719d3d618f597405794249f3e87f976150ea7793e261e470d28b69c53c6f"} Jun 06 09:32:58 crc kubenswrapper[4911]: I0606 09:32:58.602174 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e109-account-create-tl6lr" event={"ID":"234e8605-be3a-44ec-8888-be1ce11be223","Type":"ContainerStarted","Data":"6a1b3f7ae33dd96f5f3fd4c4ce2095a045868168c034fb890db71fbc391c56d0"} Jun 06 09:32:58 crc kubenswrapper[4911]: I0606 09:32:58.604800 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37d93350-7e25-445d-97e7-0095ebd1d997","Type":"ContainerStarted","Data":"f83f371770302dbb9489dd0f5abf871e665fe33901dbbe53685b75a44cd63415"} Jun 06 09:32:58 crc kubenswrapper[4911]: I0606 09:32:58.604840 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37d93350-7e25-445d-97e7-0095ebd1d997","Type":"ContainerStarted","Data":"fa9ee65d36e9ca66a295004d32b5e4cb2911deebc0477d1de299ee6dd4e6de7a"} Jun 06 09:32:59 crc kubenswrapper[4911]: I0606 09:32:59.618302 4911 generic.go:334] "Generic (PLEG): container finished" podID="11097c32-bd99-4810-b897-7edf75c5e2cb" containerID="06d0629dd04f49c0b4ad482308bde30ca337e6b82fb1d94ac01fd6420df3649c" exitCode=0 Jun 06 09:32:59 crc kubenswrapper[4911]: I0606 09:32:59.618735 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-14a8-account-create-74hbj" event={"ID":"11097c32-bd99-4810-b897-7edf75c5e2cb","Type":"ContainerDied","Data":"06d0629dd04f49c0b4ad482308bde30ca337e6b82fb1d94ac01fd6420df3649c"} Jun 06 09:32:59 crc kubenswrapper[4911]: I0606 09:32:59.618781 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-14a8-account-create-74hbj" event={"ID":"11097c32-bd99-4810-b897-7edf75c5e2cb","Type":"ContainerStarted","Data":"c7608e959ba41836f72848bd2688262d1c6638c9721de0f09aedd1d0cc30400a"} Jun 06 09:32:59 crc kubenswrapper[4911]: I0606 09:32:59.634624 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.6345970130000005 podStartE2EDuration="4.634597013s" podCreationTimestamp="2025-06-06 09:32:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:32:58.637872294 +0000 UTC m=+1189.913297837" watchObservedRunningTime="2025-06-06 09:32:59.634597013 +0000 UTC m=+1190.910022556" Jun 06 09:33:00 crc kubenswrapper[4911]: I0606 09:33:00.636266 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-99ed-account-create-wm9cl" event={"ID":"511fdc0d-3107-49da-aee6-58fcb3071264","Type":"ContainerDied","Data":"72140bb43204f934a73d8c08f6052e5ed803df993ab30597f62a3982ebe6d97f"} Jun 06 09:33:00 crc kubenswrapper[4911]: I0606 09:33:00.636584 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72140bb43204f934a73d8c08f6052e5ed803df993ab30597f62a3982ebe6d97f" Jun 06 09:33:00 crc 
kubenswrapper[4911]: I0606 09:33:00.638317 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e109-account-create-tl6lr" event={"ID":"234e8605-be3a-44ec-8888-be1ce11be223","Type":"ContainerDied","Data":"6a1b3f7ae33dd96f5f3fd4c4ce2095a045868168c034fb890db71fbc391c56d0"} Jun 06 09:33:00 crc kubenswrapper[4911]: I0606 09:33:00.638370 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a1b3f7ae33dd96f5f3fd4c4ce2095a045868168c034fb890db71fbc391c56d0" Jun 06 09:33:00 crc kubenswrapper[4911]: I0606 09:33:00.686572 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e109-account-create-tl6lr" Jun 06 09:33:00 crc kubenswrapper[4911]: I0606 09:33:00.697463 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-99ed-account-create-wm9cl" Jun 06 09:33:00 crc kubenswrapper[4911]: I0606 09:33:00.721967 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7zz4\" (UniqueName: \"kubernetes.io/projected/234e8605-be3a-44ec-8888-be1ce11be223-kube-api-access-j7zz4\") pod \"234e8605-be3a-44ec-8888-be1ce11be223\" (UID: \"234e8605-be3a-44ec-8888-be1ce11be223\") " Jun 06 09:33:00 crc kubenswrapper[4911]: I0606 09:33:00.722060 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r747z\" (UniqueName: \"kubernetes.io/projected/511fdc0d-3107-49da-aee6-58fcb3071264-kube-api-access-r747z\") pod \"511fdc0d-3107-49da-aee6-58fcb3071264\" (UID: \"511fdc0d-3107-49da-aee6-58fcb3071264\") " Jun 06 09:33:00 crc kubenswrapper[4911]: I0606 09:33:00.730130 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/234e8605-be3a-44ec-8888-be1ce11be223-kube-api-access-j7zz4" (OuterVolumeSpecName: "kube-api-access-j7zz4") pod "234e8605-be3a-44ec-8888-be1ce11be223" (UID: "234e8605-be3a-44ec-8888-be1ce11be223"). InnerVolumeSpecName "kube-api-access-j7zz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:00 crc kubenswrapper[4911]: I0606 09:33:00.757229 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/511fdc0d-3107-49da-aee6-58fcb3071264-kube-api-access-r747z" (OuterVolumeSpecName: "kube-api-access-r747z") pod "511fdc0d-3107-49da-aee6-58fcb3071264" (UID: "511fdc0d-3107-49da-aee6-58fcb3071264"). InnerVolumeSpecName "kube-api-access-r747z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:00 crc kubenswrapper[4911]: I0606 09:33:00.824060 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7zz4\" (UniqueName: \"kubernetes.io/projected/234e8605-be3a-44ec-8888-be1ce11be223-kube-api-access-j7zz4\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:00 crc kubenswrapper[4911]: I0606 09:33:00.824121 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r747z\" (UniqueName: \"kubernetes.io/projected/511fdc0d-3107-49da-aee6-58fcb3071264-kube-api-access-r747z\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.330073 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-14a8-account-create-74hbj" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.434936 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvgtn\" (UniqueName: \"kubernetes.io/projected/11097c32-bd99-4810-b897-7edf75c5e2cb-kube-api-access-nvgtn\") pod \"11097c32-bd99-4810-b897-7edf75c5e2cb\" (UID: \"11097c32-bd99-4810-b897-7edf75c5e2cb\") " Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.440658 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11097c32-bd99-4810-b897-7edf75c5e2cb-kube-api-access-nvgtn" (OuterVolumeSpecName: "kube-api-access-nvgtn") pod "11097c32-bd99-4810-b897-7edf75c5e2cb" (UID: "11097c32-bd99-4810-b897-7edf75c5e2cb"). InnerVolumeSpecName "kube-api-access-nvgtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.537567 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvgtn\" (UniqueName: \"kubernetes.io/projected/11097c32-bd99-4810-b897-7edf75c5e2cb-kube-api-access-nvgtn\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.661778 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-14a8-account-create-74hbj" event={"ID":"11097c32-bd99-4810-b897-7edf75c5e2cb","Type":"ContainerDied","Data":"c7608e959ba41836f72848bd2688262d1c6638c9721de0f09aedd1d0cc30400a"} Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.661832 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7608e959ba41836f72848bd2688262d1c6638c9721de0f09aedd1d0cc30400a" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.661798 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-14a8-account-create-74hbj" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.661894 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-99ed-account-create-wm9cl" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.661792 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-e109-account-create-tl6lr" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.797307 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-bl2zm"] Jun 06 09:33:01 crc kubenswrapper[4911]: E0606 09:33:01.797800 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="234e8605-be3a-44ec-8888-be1ce11be223" containerName="mariadb-account-create" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.797828 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="234e8605-be3a-44ec-8888-be1ce11be223" containerName="mariadb-account-create" Jun 06 09:33:01 crc kubenswrapper[4911]: E0606 09:33:01.797867 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511fdc0d-3107-49da-aee6-58fcb3071264" containerName="mariadb-account-create" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.797876 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="511fdc0d-3107-49da-aee6-58fcb3071264" containerName="mariadb-account-create" Jun 06 09:33:01 crc kubenswrapper[4911]: E0606 09:33:01.797907 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11097c32-bd99-4810-b897-7edf75c5e2cb" containerName="mariadb-account-create" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.797914 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="11097c32-bd99-4810-b897-7edf75c5e2cb" containerName="mariadb-account-create" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.798135 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="234e8605-be3a-44ec-8888-be1ce11be223" containerName="mariadb-account-create" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.798176 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="511fdc0d-3107-49da-aee6-58fcb3071264" containerName="mariadb-account-create" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.798192 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="11097c32-bd99-4810-b897-7edf75c5e2cb" containerName="mariadb-account-create" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.798968 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-bl2zm" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.963193 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-host\") pod \"crc-debug-bl2zm\" (UID: \"a1c90b25-840e-4bb0-b19e-82bd3585cdf5\") " pod="openstack/crc-debug-bl2zm" Jun 06 09:33:01 crc kubenswrapper[4911]: I0606 09:33:01.963295 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htzj8\" (UniqueName: \"kubernetes.io/projected/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-kube-api-access-htzj8\") pod \"crc-debug-bl2zm\" (UID: \"a1c90b25-840e-4bb0-b19e-82bd3585cdf5\") " pod="openstack/crc-debug-bl2zm" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.064947 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-host\") pod \"crc-debug-bl2zm\" (UID: \"a1c90b25-840e-4bb0-b19e-82bd3585cdf5\") " pod="openstack/crc-debug-bl2zm" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.065008 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htzj8\" (UniqueName: \"kubernetes.io/projected/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-kube-api-access-htzj8\") pod \"crc-debug-bl2zm\" (UID: \"a1c90b25-840e-4bb0-b19e-82bd3585cdf5\") " pod="openstack/crc-debug-bl2zm" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.065459 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-host\") pod \"crc-debug-bl2zm\" (UID: \"a1c90b25-840e-4bb0-b19e-82bd3585cdf5\") " pod="openstack/crc-debug-bl2zm" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.086031 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htzj8\" (UniqueName: \"kubernetes.io/projected/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-kube-api-access-htzj8\") pod \"crc-debug-bl2zm\" (UID: \"a1c90b25-840e-4bb0-b19e-82bd3585cdf5\") " pod="openstack/crc-debug-bl2zm" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.119583 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-bl2zm" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.343706 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6rrkj"] Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.345649 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.348057 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.348069 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-tg6z6" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.348195 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.363027 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6rrkj"] Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.371118 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.371789 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvjxg\" (UniqueName: \"kubernetes.io/projected/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-kube-api-access-cvjxg\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.371965 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-config-data\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.372044 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-scripts\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.473912 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvjxg\" (UniqueName: \"kubernetes.io/projected/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-kube-api-access-cvjxg\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.474029 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-config-data\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.474069 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-scripts\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: 
\"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.475029 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.478868 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-config-data\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.479301 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-scripts\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.479484 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.491397 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvjxg\" (UniqueName: \"kubernetes.io/projected/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-kube-api-access-cvjxg\") pod \"nova-cell0-conductor-db-sync-6rrkj\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.676118 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-bl2zm" event={"ID":"a1c90b25-840e-4bb0-b19e-82bd3585cdf5","Type":"ContainerStarted","Data":"5d1343d141c059a94a5c3e1a7b4989040f764ec3adb17a96f95dc552b836b175"} Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.676214 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-bl2zm" event={"ID":"a1c90b25-840e-4bb0-b19e-82bd3585cdf5","Type":"ContainerStarted","Data":"0c560248809ab55c3dc8384e2f42946195f032ba3df6a93446f33c25a4c70e22"} Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.688705 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.694948 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-bl2zm" podStartSLOduration=1.6949292969999998 podStartE2EDuration="1.694929297s" podCreationTimestamp="2025-06-06 09:33:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:33:02.691317384 +0000 UTC m=+1193.966742927" watchObservedRunningTime="2025-06-06 09:33:02.694929297 +0000 UTC m=+1193.970354830" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.900537 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.900583 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.937032 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jun 06 09:33:02 crc kubenswrapper[4911]: I0606 09:33:02.950608 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jun 06 09:33:03 crc kubenswrapper[4911]: I0606 09:33:03.189289 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6rrkj"] Jun 06 09:33:03 crc kubenswrapper[4911]: W0606 09:33:03.194345 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa07fe9d_3188_4dae_b4f9_29e4a1efbbfa.slice/crio-951ecaeadda1fa7c2a5c92d6a212b399eb6fe87abf1ef8e1b2f0cdc39c3057a8 WatchSource:0}: Error finding container 951ecaeadda1fa7c2a5c92d6a212b399eb6fe87abf1ef8e1b2f0cdc39c3057a8: Status 404 returned error can't find the container with id 951ecaeadda1fa7c2a5c92d6a212b399eb6fe87abf1ef8e1b2f0cdc39c3057a8 Jun 06 09:33:03 crc kubenswrapper[4911]: I0606 09:33:03.698195 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6rrkj" event={"ID":"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa","Type":"ContainerStarted","Data":"951ecaeadda1fa7c2a5c92d6a212b399eb6fe87abf1ef8e1b2f0cdc39c3057a8"} Jun 06 09:33:03 crc kubenswrapper[4911]: I0606 09:33:03.698282 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jun 06 09:33:03 crc kubenswrapper[4911]: I0606 09:33:03.698620 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jun 06 09:33:06 crc kubenswrapper[4911]: I0606 09:33:06.189870 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jun 06 09:33:06 crc kubenswrapper[4911]: I0606 09:33:06.190521 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jun 06 09:33:06 crc kubenswrapper[4911]: I0606 09:33:06.224305 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jun 06 09:33:06 crc kubenswrapper[4911]: I0606 09:33:06.224622 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jun 06 09:33:06 crc kubenswrapper[4911]: I0606 09:33:06.237356 4911 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jun 06 09:33:06 crc kubenswrapper[4911]: I0606 09:33:06.278929 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jun 06 09:33:06 crc kubenswrapper[4911]: I0606 09:33:06.314231 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jun 06 09:33:06 crc kubenswrapper[4911]: I0606 09:33:06.734764 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jun 06 09:33:06 crc kubenswrapper[4911]: I0606 09:33:06.734825 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jun 06 09:33:06 crc kubenswrapper[4911]: I0606 09:33:06.800490 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Jun 06 09:33:07 crc kubenswrapper[4911]: I0606 09:33:07.089877 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jun 06 09:33:07 crc kubenswrapper[4911]: I0606 09:33:07.090161 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="3a014170-9bee-45a3-af73-3a5c0418de93" containerName="kube-state-metrics" containerID="cri-o://ead6721d9e3dd513e9b22bae105bbd48b465dff45673bd08c03d08293f46c111" gracePeriod=30 Jun 06 09:33:07 crc kubenswrapper[4911]: I0606 09:33:07.746281 4911 generic.go:334] "Generic (PLEG): container finished" podID="3a014170-9bee-45a3-af73-3a5c0418de93" containerID="ead6721d9e3dd513e9b22bae105bbd48b465dff45673bd08c03d08293f46c111" exitCode=2 Jun 06 09:33:07 crc kubenswrapper[4911]: I0606 09:33:07.746371 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3a014170-9bee-45a3-af73-3a5c0418de93","Type":"ContainerDied","Data":"ead6721d9e3dd513e9b22bae105bbd48b465dff45673bd08c03d08293f46c111"} Jun 06 09:33:09 crc kubenswrapper[4911]: I0606 09:33:09.166389 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jun 06 09:33:09 crc kubenswrapper[4911]: I0606 09:33:09.166950 4911 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jun 06 09:33:09 crc kubenswrapper[4911]: I0606 09:33:09.167539 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jun 06 09:33:10 crc kubenswrapper[4911]: I0606 09:33:10.698728 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:33:10 crc kubenswrapper[4911]: I0606 09:33:10.699402 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="ceilometer-central-agent" containerID="cri-o://c7727c01e2fcc2cbcea1cfa164f6f8f40cdb6d4f2f1d356d7ff5d342baa49b54" gracePeriod=30 Jun 06 09:33:10 crc kubenswrapper[4911]: I0606 09:33:10.699480 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="sg-core" containerID="cri-o://073e38b9a7e8987546b532264cde763b88a042bd531364f08f0dfb36f1757b6e" gracePeriod=30 Jun 06 09:33:10 crc kubenswrapper[4911]: I0606 09:33:10.699515 4911 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="ceilometer-notification-agent" containerID="cri-o://3dd4662179adf4c8b70a0f3572c715c3e21b380a3394d8803d67149214cef055" gracePeriod=30 Jun 06 09:33:10 crc kubenswrapper[4911]: I0606 09:33:10.699461 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="proxy-httpd" containerID="cri-o://74622eae93c5196a3c70307c4d89a71379eae1a53a92bc12e388051584ff9f3b" gracePeriod=30 Jun 06 09:33:11 crc kubenswrapper[4911]: I0606 09:33:11.792367 4911 generic.go:334] "Generic (PLEG): container finished" podID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerID="74622eae93c5196a3c70307c4d89a71379eae1a53a92bc12e388051584ff9f3b" exitCode=0 Jun 06 09:33:11 crc kubenswrapper[4911]: I0606 09:33:11.792691 4911 generic.go:334] "Generic (PLEG): container finished" podID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerID="073e38b9a7e8987546b532264cde763b88a042bd531364f08f0dfb36f1757b6e" exitCode=2 Jun 06 09:33:11 crc kubenswrapper[4911]: I0606 09:33:11.792701 4911 generic.go:334] "Generic (PLEG): container finished" podID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerID="c7727c01e2fcc2cbcea1cfa164f6f8f40cdb6d4f2f1d356d7ff5d342baa49b54" exitCode=0 Jun 06 09:33:11 crc kubenswrapper[4911]: I0606 09:33:11.792476 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7","Type":"ContainerDied","Data":"74622eae93c5196a3c70307c4d89a71379eae1a53a92bc12e388051584ff9f3b"} Jun 06 09:33:11 crc kubenswrapper[4911]: I0606 09:33:11.792784 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7","Type":"ContainerDied","Data":"073e38b9a7e8987546b532264cde763b88a042bd531364f08f0dfb36f1757b6e"} Jun 06 09:33:11 crc kubenswrapper[4911]: I0606 09:33:11.792796 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7","Type":"ContainerDied","Data":"c7727c01e2fcc2cbcea1cfa164f6f8f40cdb6d4f2f1d356d7ff5d342baa49b54"} Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.320822 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.425037 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snx4r\" (UniqueName: \"kubernetes.io/projected/3a014170-9bee-45a3-af73-3a5c0418de93-kube-api-access-snx4r\") pod \"3a014170-9bee-45a3-af73-3a5c0418de93\" (UID: \"3a014170-9bee-45a3-af73-3a5c0418de93\") " Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.431280 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a014170-9bee-45a3-af73-3a5c0418de93-kube-api-access-snx4r" (OuterVolumeSpecName: "kube-api-access-snx4r") pod "3a014170-9bee-45a3-af73-3a5c0418de93" (UID: "3a014170-9bee-45a3-af73-3a5c0418de93"). InnerVolumeSpecName "kube-api-access-snx4r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.527624 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snx4r\" (UniqueName: \"kubernetes.io/projected/3a014170-9bee-45a3-af73-3a5c0418de93-kube-api-access-snx4r\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.805968 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6rrkj" event={"ID":"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa","Type":"ContainerStarted","Data":"99f4b04fae1d8cb02541a0f900906221107c2423c0d4ace858cc013630231321"} Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.809704 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3a014170-9bee-45a3-af73-3a5c0418de93","Type":"ContainerDied","Data":"5e819ab020d5fdf3b89541e988928d64f2e89c2f9a5f88337723f83989c91e1a"} Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.809762 4911 scope.go:117] "RemoveContainer" containerID="ead6721d9e3dd513e9b22bae105bbd48b465dff45673bd08c03d08293f46c111" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.809816 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.846374 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-6rrkj" podStartSLOduration=2.025916007 podStartE2EDuration="10.846348259s" podCreationTimestamp="2025-06-06 09:33:02 +0000 UTC" firstStartedPulling="2025-06-06 09:33:03.195850867 +0000 UTC m=+1194.471276410" lastFinishedPulling="2025-06-06 09:33:12.016283119 +0000 UTC m=+1203.291708662" observedRunningTime="2025-06-06 09:33:12.839086982 +0000 UTC m=+1204.114512525" watchObservedRunningTime="2025-06-06 09:33:12.846348259 +0000 UTC m=+1204.121773802" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.863283 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.875665 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.888881 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jun 06 09:33:12 crc kubenswrapper[4911]: E0606 09:33:12.889529 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a014170-9bee-45a3-af73-3a5c0418de93" containerName="kube-state-metrics" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.889558 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a014170-9bee-45a3-af73-3a5c0418de93" containerName="kube-state-metrics" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.889801 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a014170-9bee-45a3-af73-3a5c0418de93" containerName="kube-state-metrics" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.890621 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.894776 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.895019 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Jun 06 09:33:12 crc kubenswrapper[4911]: I0606 09:33:12.907375 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.038521 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/1be3e7e6-2701-48b1-b26b-a154930ba2bb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.038670 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnc49\" (UniqueName: \"kubernetes.io/projected/1be3e7e6-2701-48b1-b26b-a154930ba2bb-kube-api-access-xnc49\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.039330 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/1be3e7e6-2701-48b1-b26b-a154930ba2bb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.039562 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1be3e7e6-2701-48b1-b26b-a154930ba2bb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.141887 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/1be3e7e6-2701-48b1-b26b-a154930ba2bb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.142061 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnc49\" (UniqueName: \"kubernetes.io/projected/1be3e7e6-2701-48b1-b26b-a154930ba2bb-kube-api-access-xnc49\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.142538 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/1be3e7e6-2701-48b1-b26b-a154930ba2bb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.143121 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/1be3e7e6-2701-48b1-b26b-a154930ba2bb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.152039 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/1be3e7e6-2701-48b1-b26b-a154930ba2bb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.152178 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1be3e7e6-2701-48b1-b26b-a154930ba2bb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.153347 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/1be3e7e6-2701-48b1-b26b-a154930ba2bb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.162455 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnc49\" (UniqueName: \"kubernetes.io/projected/1be3e7e6-2701-48b1-b26b-a154930ba2bb-kube-api-access-xnc49\") pod \"kube-state-metrics-0\" (UID: \"1be3e7e6-2701-48b1-b26b-a154930ba2bb\") " pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.214072 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.762553 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.821148 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1be3e7e6-2701-48b1-b26b-a154930ba2bb","Type":"ContainerStarted","Data":"d3b983bed5bcb564270934a1290e0a9ee7c3ffb176ce315b63b8c082c0c15159"} Jun 06 09:33:13 crc kubenswrapper[4911]: I0606 09:33:13.961831 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a014170-9bee-45a3-af73-3a5c0418de93" path="/var/lib/kubelet/pods/3a014170-9bee-45a3-af73-3a5c0418de93/volumes" Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.798403 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-bl2zm"] Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.801631 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-bl2zm" podUID="a1c90b25-840e-4bb0-b19e-82bd3585cdf5" containerName="container-00" containerID="cri-o://5d1343d141c059a94a5c3e1a7b4989040f764ec3adb17a96f95dc552b836b175" gracePeriod=2 Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.807913 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-bl2zm"] Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.840171 4911 generic.go:334] "Generic (PLEG): container finished" podID="a1c90b25-840e-4bb0-b19e-82bd3585cdf5" containerID="5d1343d141c059a94a5c3e1a7b4989040f764ec3adb17a96f95dc552b836b175" exitCode=0 Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.840297 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c560248809ab55c3dc8384e2f42946195f032ba3df6a93446f33c25a4c70e22" Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.842612 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1be3e7e6-2701-48b1-b26b-a154930ba2bb","Type":"ContainerStarted","Data":"e483126b005d6812969fa492ed67874ee2b6ad49bc7c2ee9e15035bf20ac628f"} Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.842818 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.867487 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.506658582 podStartE2EDuration="2.867463459s" podCreationTimestamp="2025-06-06 09:33:12 +0000 UTC" firstStartedPulling="2025-06-06 09:33:13.766265406 +0000 UTC m=+1205.041690939" lastFinishedPulling="2025-06-06 09:33:14.127070273 +0000 UTC m=+1205.402495816" observedRunningTime="2025-06-06 09:33:14.85894219 +0000 UTC m=+1206.134367743" watchObservedRunningTime="2025-06-06 09:33:14.867463459 +0000 UTC m=+1206.142889002" Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.888054 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-bl2zm" Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.978335 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-host\") pod \"a1c90b25-840e-4bb0-b19e-82bd3585cdf5\" (UID: \"a1c90b25-840e-4bb0-b19e-82bd3585cdf5\") " Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.978530 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htzj8\" (UniqueName: \"kubernetes.io/projected/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-kube-api-access-htzj8\") pod \"a1c90b25-840e-4bb0-b19e-82bd3585cdf5\" (UID: \"a1c90b25-840e-4bb0-b19e-82bd3585cdf5\") " Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.978878 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-host" (OuterVolumeSpecName: "host") pod "a1c90b25-840e-4bb0-b19e-82bd3585cdf5" (UID: "a1c90b25-840e-4bb0-b19e-82bd3585cdf5"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.979621 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:14 crc kubenswrapper[4911]: I0606 09:33:14.988301 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-kube-api-access-htzj8" (OuterVolumeSpecName: "kube-api-access-htzj8") pod "a1c90b25-840e-4bb0-b19e-82bd3585cdf5" (UID: "a1c90b25-840e-4bb0-b19e-82bd3585cdf5"). InnerVolumeSpecName "kube-api-access-htzj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:15 crc kubenswrapper[4911]: I0606 09:33:15.082150 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htzj8\" (UniqueName: \"kubernetes.io/projected/a1c90b25-840e-4bb0-b19e-82bd3585cdf5-kube-api-access-htzj8\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:15 crc kubenswrapper[4911]: I0606 09:33:15.850629 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-bl2zm" Jun 06 09:33:15 crc kubenswrapper[4911]: I0606 09:33:15.959052 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1c90b25-840e-4bb0-b19e-82bd3585cdf5" path="/var/lib/kubelet/pods/a1c90b25-840e-4bb0-b19e-82bd3585cdf5/volumes" Jun 06 09:33:17 crc kubenswrapper[4911]: I0606 09:33:17.875528 4911 generic.go:334] "Generic (PLEG): container finished" podID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerID="3dd4662179adf4c8b70a0f3572c715c3e21b380a3394d8803d67149214cef055" exitCode=0 Jun 06 09:33:17 crc kubenswrapper[4911]: I0606 09:33:17.875928 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7","Type":"ContainerDied","Data":"3dd4662179adf4c8b70a0f3572c715c3e21b380a3394d8803d67149214cef055"} Jun 06 09:33:17 crc kubenswrapper[4911]: I0606 09:33:17.875962 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7","Type":"ContainerDied","Data":"b73837d9cedf1ade100b0de24e2ffb29f8ab08690540ae9a1d7dcf87396eb76c"} Jun 06 09:33:17 crc kubenswrapper[4911]: I0606 09:33:17.875975 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b73837d9cedf1ade100b0de24e2ffb29f8ab08690540ae9a1d7dcf87396eb76c" Jun 06 09:33:17 crc kubenswrapper[4911]: I0606 09:33:17.932485 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.062874 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-config-data\") pod \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.062962 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-scripts\") pod \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.063122 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwhmw\" (UniqueName: \"kubernetes.io/projected/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-kube-api-access-nwhmw\") pod \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.063204 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-combined-ca-bundle\") pod \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.063247 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-log-httpd\") pod \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.063333 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-sg-core-conf-yaml\") pod \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.063423 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-run-httpd\") pod \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\" (UID: \"cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7\") " Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.064606 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" (UID: "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.065295 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" (UID: "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.069290 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-scripts" (OuterVolumeSpecName: "scripts") pod "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" (UID: "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.070379 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-kube-api-access-nwhmw" (OuterVolumeSpecName: "kube-api-access-nwhmw") pod "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" (UID: "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7"). InnerVolumeSpecName "kube-api-access-nwhmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.104381 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" (UID: "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.158437 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" (UID: "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.166158 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwhmw\" (UniqueName: \"kubernetes.io/projected/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-kube-api-access-nwhmw\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.166194 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.166210 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-log-httpd\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.166220 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.166230 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-run-httpd\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.166239 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.183558 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-config-data" (OuterVolumeSpecName: "config-data") pod "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" (UID: "cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.268326 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.886730 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.927013 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.939679 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.950130 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:33:18 crc kubenswrapper[4911]: E0606 09:33:18.950630 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="sg-core" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.950660 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="sg-core" Jun 06 09:33:18 crc kubenswrapper[4911]: E0606 09:33:18.950681 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="ceilometer-notification-agent" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.950691 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="ceilometer-notification-agent" Jun 06 09:33:18 crc kubenswrapper[4911]: E0606 09:33:18.950706 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1c90b25-840e-4bb0-b19e-82bd3585cdf5" containerName="container-00" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.950718 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1c90b25-840e-4bb0-b19e-82bd3585cdf5" containerName="container-00" Jun 06 09:33:18 crc kubenswrapper[4911]: E0606 09:33:18.950745 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="ceilometer-central-agent" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.950757 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="ceilometer-central-agent" Jun 06 09:33:18 crc kubenswrapper[4911]: E0606 09:33:18.950787 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="proxy-httpd" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.950796 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="proxy-httpd" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.950998 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="sg-core" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.951026 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="ceilometer-central-agent" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.951044 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1c90b25-840e-4bb0-b19e-82bd3585cdf5" containerName="container-00" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.951059 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="ceilometer-notification-agent" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.951071 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" containerName="proxy-httpd" Jun 06 09:33:18 crc 
kubenswrapper[4911]: I0606 09:33:18.954398 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.960153 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.960396 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.961075 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jun 06 09:33:18 crc kubenswrapper[4911]: I0606 09:33:18.971538 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.083326 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.083367 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.083423 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-log-httpd\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.083695 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-run-httpd\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.083726 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.083759 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrndv\" (UniqueName: \"kubernetes.io/projected/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-kube-api-access-qrndv\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.083788 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-scripts\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.083810 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-config-data\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.186073 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-run-httpd\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.186498 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.186530 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrndv\" (UniqueName: \"kubernetes.io/projected/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-kube-api-access-qrndv\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.186553 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-scripts\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.186569 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-config-data\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.186632 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.186650 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.186689 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-log-httpd\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.186719 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-run-httpd\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.187066 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-log-httpd\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.192155 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.192778 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-scripts\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.193239 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-config-data\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.193521 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.193656 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.204279 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrndv\" (UniqueName: \"kubernetes.io/projected/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-kube-api-access-qrndv\") pod \"ceilometer-0\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.277185 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.806647 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.898363 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37","Type":"ContainerStarted","Data":"2b2e65fde91f84e5e5537d1d4d28880ebf2df80e9ed7d39ae2b514e8cc958c59"} Jun 06 09:33:19 crc kubenswrapper[4911]: I0606 09:33:19.962488 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7" path="/var/lib/kubelet/pods/cd2b9432-65a7-4dea-8cad-d2b7b4d9a5f7/volumes" Jun 06 09:33:20 crc kubenswrapper[4911]: I0606 09:33:20.913749 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37","Type":"ContainerStarted","Data":"193854695dad61cd28031ca959496fb35970e298df6de7a80b9c033e618a029d"} Jun 06 09:33:21 crc kubenswrapper[4911]: I0606 09:33:21.925230 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37","Type":"ContainerStarted","Data":"e3d551ccd655c88958ed221e4f631112bc911aefaa6fdf08a7d6d790b822eacc"} Jun 06 09:33:21 crc kubenswrapper[4911]: I0606 09:33:21.925775 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37","Type":"ContainerStarted","Data":"31de43dbc2667d1d8e10cd2f5a1e4115329d50ae59209787fd17c0e6c4cfd900"} Jun 06 09:33:23 crc kubenswrapper[4911]: I0606 09:33:23.228417 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jun 06 09:33:23 crc kubenswrapper[4911]: I0606 09:33:23.967145 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37","Type":"ContainerStarted","Data":"86a51c83ca2759180c63f61492dfa41a744a52fd5cdd3f08cd78d5798acf2ca8"} Jun 06 09:33:23 crc kubenswrapper[4911]: I0606 09:33:23.968261 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jun 06 09:33:23 crc kubenswrapper[4911]: I0606 09:33:23.993267 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.925131302 podStartE2EDuration="5.993239597s" podCreationTimestamp="2025-06-06 09:33:18 +0000 UTC" firstStartedPulling="2025-06-06 09:33:19.813015208 +0000 UTC m=+1211.088440751" lastFinishedPulling="2025-06-06 09:33:22.881123503 +0000 UTC m=+1214.156549046" observedRunningTime="2025-06-06 09:33:23.98946701 +0000 UTC m=+1215.264892573" watchObservedRunningTime="2025-06-06 09:33:23.993239597 +0000 UTC m=+1215.268665130" Jun 06 09:33:25 crc kubenswrapper[4911]: I0606 09:33:25.978471 4911 generic.go:334] "Generic (PLEG): container finished" podID="aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa" containerID="99f4b04fae1d8cb02541a0f900906221107c2423c0d4ace858cc013630231321" exitCode=0 Jun 06 09:33:25 crc kubenswrapper[4911]: I0606 09:33:25.978655 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6rrkj" event={"ID":"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa","Type":"ContainerDied","Data":"99f4b04fae1d8cb02541a0f900906221107c2423c0d4ace858cc013630231321"} Jun 06 09:33:27 crc kubenswrapper[4911]: I0606 09:33:27.942004 4911 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.006226 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6rrkj" event={"ID":"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa","Type":"ContainerDied","Data":"951ecaeadda1fa7c2a5c92d6a212b399eb6fe87abf1ef8e1b2f0cdc39c3057a8"} Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.006280 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="951ecaeadda1fa7c2a5c92d6a212b399eb6fe87abf1ef8e1b2f0cdc39c3057a8" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.006314 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6rrkj" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.068318 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-combined-ca-bundle\") pod \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.068415 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-scripts\") pod \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.068474 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvjxg\" (UniqueName: \"kubernetes.io/projected/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-kube-api-access-cvjxg\") pod \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.068548 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-config-data\") pod \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\" (UID: \"aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa\") " Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.077363 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-kube-api-access-cvjxg" (OuterVolumeSpecName: "kube-api-access-cvjxg") pod "aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa" (UID: "aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa"). InnerVolumeSpecName "kube-api-access-cvjxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.078351 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-scripts" (OuterVolumeSpecName: "scripts") pod "aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa" (UID: "aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.112231 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-config-data" (OuterVolumeSpecName: "config-data") pod "aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa" (UID: "aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.123141 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa" (UID: "aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.171414 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.171480 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvjxg\" (UniqueName: \"kubernetes.io/projected/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-kube-api-access-cvjxg\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.171496 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.171510 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.212796 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jun 06 09:33:28 crc kubenswrapper[4911]: E0606 09:33:28.214601 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa" containerName="nova-cell0-conductor-db-sync" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.220975 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa" containerName="nova-cell0-conductor-db-sync" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.223462 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa" containerName="nova-cell0-conductor-db-sync" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.226328 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.229486 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.274374 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42ab8896-1ee7-4c4d-a713-5c28744fce0d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"42ab8896-1ee7-4c4d-a713-5c28744fce0d\") " pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.274453 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42ab8896-1ee7-4c4d-a713-5c28744fce0d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"42ab8896-1ee7-4c4d-a713-5c28744fce0d\") " pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.274645 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ng7t4\" (UniqueName: \"kubernetes.io/projected/42ab8896-1ee7-4c4d-a713-5c28744fce0d-kube-api-access-ng7t4\") pod \"nova-cell0-conductor-0\" (UID: \"42ab8896-1ee7-4c4d-a713-5c28744fce0d\") " pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.377616 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42ab8896-1ee7-4c4d-a713-5c28744fce0d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"42ab8896-1ee7-4c4d-a713-5c28744fce0d\") " pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.378000 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42ab8896-1ee7-4c4d-a713-5c28744fce0d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"42ab8896-1ee7-4c4d-a713-5c28744fce0d\") " pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.378054 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ng7t4\" (UniqueName: \"kubernetes.io/projected/42ab8896-1ee7-4c4d-a713-5c28744fce0d-kube-api-access-ng7t4\") pod \"nova-cell0-conductor-0\" (UID: \"42ab8896-1ee7-4c4d-a713-5c28744fce0d\") " pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.383206 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42ab8896-1ee7-4c4d-a713-5c28744fce0d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"42ab8896-1ee7-4c4d-a713-5c28744fce0d\") " pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.383698 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42ab8896-1ee7-4c4d-a713-5c28744fce0d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"42ab8896-1ee7-4c4d-a713-5c28744fce0d\") " pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.402019 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ng7t4\" (UniqueName: 
\"kubernetes.io/projected/42ab8896-1ee7-4c4d-a713-5c28744fce0d-kube-api-access-ng7t4\") pod \"nova-cell0-conductor-0\" (UID: \"42ab8896-1ee7-4c4d-a713-5c28744fce0d\") " pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:28 crc kubenswrapper[4911]: I0606 09:33:28.553472 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:29 crc kubenswrapper[4911]: I0606 09:33:29.014851 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jun 06 09:33:30 crc kubenswrapper[4911]: I0606 09:33:30.031145 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"42ab8896-1ee7-4c4d-a713-5c28744fce0d","Type":"ContainerStarted","Data":"742128ceeca3246a5de4482c1070ad87db0d3352b65b2be5bf523e5e8c9c13f6"} Jun 06 09:33:30 crc kubenswrapper[4911]: I0606 09:33:30.031782 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:30 crc kubenswrapper[4911]: I0606 09:33:30.031801 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"42ab8896-1ee7-4c4d-a713-5c28744fce0d","Type":"ContainerStarted","Data":"9c3f053fd3faa680d140bba5fbba40a4c966ca2846b8dc2c9fe248d32f6484e1"} Jun 06 09:33:30 crc kubenswrapper[4911]: I0606 09:33:30.053041 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.053018385 podStartE2EDuration="2.053018385s" podCreationTimestamp="2025-06-06 09:33:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:33:30.047483462 +0000 UTC m=+1221.322909025" watchObservedRunningTime="2025-06-06 09:33:30.053018385 +0000 UTC m=+1221.328443938" Jun 06 09:33:38 crc kubenswrapper[4911]: I0606 09:33:38.584760 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.311700 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-hbh54"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.313711 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.315856 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.316323 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.339197 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-hbh54"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.416937 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.417136 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-scripts\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.417181 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64g4g\" (UniqueName: \"kubernetes.io/projected/ca5e53cd-23ba-4460-80b0-4c2cca13773e-kube-api-access-64g4g\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.417252 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-config-data\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.519052 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-config-data\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.519457 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.519671 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-scripts\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.519791 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64g4g\" (UniqueName: 
\"kubernetes.io/projected/ca5e53cd-23ba-4460-80b0-4c2cca13773e-kube-api-access-64g4g\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.525895 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.546291 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-scripts\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.547353 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-config-data\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.556200 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.559595 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.560319 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64g4g\" (UniqueName: \"kubernetes.io/projected/ca5e53cd-23ba-4460-80b0-4c2cca13773e-kube-api-access-64g4g\") pod \"nova-cell0-cell-mapping-hbh54\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.564453 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.602527 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.630990 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.631183 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5l4c\" (UniqueName: \"kubernetes.io/projected/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-kube-api-access-p5l4c\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.631337 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-config-data\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 
09:33:39.631378 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-logs\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.645795 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.707495 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.708954 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.721912 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.733952 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.734035 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-config-data\") pod \"nova-scheduler-0\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.734080 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.734135 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5l4c\" (UniqueName: \"kubernetes.io/projected/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-kube-api-access-p5l4c\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.734161 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.734276 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-config-data\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.734313 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-logs\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.734346 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46hlc\" (UniqueName: 
\"kubernetes.io/projected/16811511-12af-4ab9-9a29-dcfed29b0f25-kube-api-access-46hlc\") pod \"nova-scheduler-0\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.735402 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-logs\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.759156 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.776131 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-config-data\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.797121 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.798986 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5l4c\" (UniqueName: \"kubernetes.io/projected/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-kube-api-access-p5l4c\") pod \"nova-api-0\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " pod="openstack/nova-api-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.801639 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.815745 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.832772 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.840349 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-config-data\") pod \"nova-scheduler-0\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.840793 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6fkk\" (UniqueName: \"kubernetes.io/projected/3020b927-6fc7-446c-b89f-62a845e61aee-kube-api-access-c6fkk\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.840844 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.840861 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-config-data\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.840895 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3020b927-6fc7-446c-b89f-62a845e61aee-logs\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.840921 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.841054 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46hlc\" (UniqueName: \"kubernetes.io/projected/16811511-12af-4ab9-9a29-dcfed29b0f25-kube-api-access-46hlc\") pod \"nova-scheduler-0\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.849157 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-config-data\") pod \"nova-scheduler-0\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.851022 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.889404 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46hlc\" (UniqueName: \"kubernetes.io/projected/16811511-12af-4ab9-9a29-dcfed29b0f25-kube-api-access-46hlc\") pod \"nova-scheduler-0\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.909434 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.910640 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.912608 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.917914 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.918738 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.931272 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-686c8bcc79-fgqg8"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.943144 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.943251 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.943282 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9t7r\" (UniqueName: \"kubernetes.io/projected/994636bb-4927-4649-9032-3b3c28eb6289-kube-api-access-m9t7r\") pod \"nova-cell1-novncproxy-0\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.943307 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6fkk\" (UniqueName: \"kubernetes.io/projected/3020b927-6fc7-446c-b89f-62a845e61aee-kube-api-access-c6fkk\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.943334 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc 
kubenswrapper[4911]: I0606 09:33:39.943351 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-config-data\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.943373 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3020b927-6fc7-446c-b89f-62a845e61aee-logs\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.947998 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.948969 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-686c8bcc79-fgqg8"] Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.950191 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3020b927-6fc7-446c-b89f-62a845e61aee-logs\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.953773 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.958845 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-config-data\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.971739 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6fkk\" (UniqueName: \"kubernetes.io/projected/3020b927-6fc7-446c-b89f-62a845e61aee-kube-api-access-c6fkk\") pod \"nova-metadata-0\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " pod="openstack/nova-metadata-0" Jun 06 09:33:39 crc kubenswrapper[4911]: I0606 09:33:39.997135 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.045758 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-svc\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.045813 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-swift-storage-0\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.045835 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64qqv\" (UniqueName: \"kubernetes.io/projected/0a07157b-eade-4d9d-8d5d-277ed8ff407e-kube-api-access-64qqv\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.045881 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.045944 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-sb\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.045983 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.046023 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9t7r\" (UniqueName: \"kubernetes.io/projected/994636bb-4927-4649-9032-3b3c28eb6289-kube-api-access-m9t7r\") pod \"nova-cell1-novncproxy-0\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.046040 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-nb\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.046252 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-config\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: 
\"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.051756 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.051816 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.066661 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9t7r\" (UniqueName: \"kubernetes.io/projected/994636bb-4927-4649-9032-3b3c28eb6289-kube-api-access-m9t7r\") pod \"nova-cell1-novncproxy-0\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.151585 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-swift-storage-0\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.151651 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64qqv\" (UniqueName: \"kubernetes.io/projected/0a07157b-eade-4d9d-8d5d-277ed8ff407e-kube-api-access-64qqv\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.151823 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-sb\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.151945 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-nb\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.152109 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-config\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.152255 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-svc\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 
09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.152583 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-swift-storage-0\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.153204 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-svc\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.153804 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-config\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.154072 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-nb\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.154185 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-sb\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.172406 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64qqv\" (UniqueName: \"kubernetes.io/projected/0a07157b-eade-4d9d-8d5d-277ed8ff407e-kube-api-access-64qqv\") pod \"dnsmasq-dns-686c8bcc79-fgqg8\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.250208 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.276298 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.286814 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.856368 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:33:40 crc kubenswrapper[4911]: I0606 09:33:40.865030 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-hbh54"] Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.144612 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"16811511-12af-4ab9-9a29-dcfed29b0f25","Type":"ContainerStarted","Data":"63f4ba02671ec0d1580b577c53c04595e95770b779455957f847f75ca4026d40"} Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.146834 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hbh54" event={"ID":"ca5e53cd-23ba-4460-80b0-4c2cca13773e","Type":"ContainerStarted","Data":"90455f720a73953e46daa20e61e5cd68ecc7ddf301a441a4b34a908bfdd660ee"} Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.146882 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hbh54" event={"ID":"ca5e53cd-23ba-4460-80b0-4c2cca13773e","Type":"ContainerStarted","Data":"162fa8275da1373a654fdb38555f99c569ede84dacee3ca6aea7cf39806e9abc"} Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.174288 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-hbh54" podStartSLOduration=2.174245532 podStartE2EDuration="2.174245532s" podCreationTimestamp="2025-06-06 09:33:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:33:41.163434084 +0000 UTC m=+1232.438859627" watchObservedRunningTime="2025-06-06 09:33:41.174245532 +0000 UTC m=+1232.449671075" Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.650720 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.709244 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.764045 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-686c8bcc79-fgqg8"] Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.783284 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.830299 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9fkt8"] Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.832068 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.835308 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.835389 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.841182 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9fkt8"] Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.897339 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-config-data\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.897399 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-scripts\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.897449 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59xw7\" (UniqueName: \"kubernetes.io/projected/1f5dd978-318c-4d2d-88f4-7c4b01712832-kube-api-access-59xw7\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:41 crc kubenswrapper[4911]: I0606 09:33:41.897528 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.000295 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-config-data\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.000350 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-scripts\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.000392 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59xw7\" (UniqueName: \"kubernetes.io/projected/1f5dd978-318c-4d2d-88f4-7c4b01712832-kube-api-access-59xw7\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.000456 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.005906 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-scripts\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.011767 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.022482 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-config-data\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.024018 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59xw7\" (UniqueName: \"kubernetes.io/projected/1f5dd978-318c-4d2d-88f4-7c4b01712832-kube-api-access-59xw7\") pod \"nova-cell1-conductor-db-sync-9fkt8\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.160049 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"994636bb-4927-4649-9032-3b3c28eb6289","Type":"ContainerStarted","Data":"6e2863968986bd4ab1affe4e3fb4bfe214b25ec743e473f5da28056aba624ae6"} Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.163733 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"767fea6a-8b05-4ce5-a498-1b6fbc4aca86","Type":"ContainerStarted","Data":"9ed01fb27f6c34db495a3858d9883d1ba48cc09501b3f6dc3c8a680f7433e466"} Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.166805 4911 generic.go:334] "Generic (PLEG): container finished" podID="0a07157b-eade-4d9d-8d5d-277ed8ff407e" containerID="10432df8cde42a2b0832ed1159a6effbdc492a6dbfe845505e76efe265ad78be" exitCode=0 Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.166871 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" event={"ID":"0a07157b-eade-4d9d-8d5d-277ed8ff407e","Type":"ContainerDied","Data":"10432df8cde42a2b0832ed1159a6effbdc492a6dbfe845505e76efe265ad78be"} Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.166894 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" event={"ID":"0a07157b-eade-4d9d-8d5d-277ed8ff407e","Type":"ContainerStarted","Data":"d870554e6074fd449e70c75e9f71083f0aaf215e9fdbc2e2349e08a1fce6864c"} Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.171132 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"3020b927-6fc7-446c-b89f-62a845e61aee","Type":"ContainerStarted","Data":"b2e10c95c0ae3ab18e9673dd34bcd710274321e4d0434a38d59c568be64a60c6"} Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.262043 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:42 crc kubenswrapper[4911]: I0606 09:33:42.939324 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9fkt8"] Jun 06 09:33:43 crc kubenswrapper[4911]: I0606 09:33:43.191991 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" event={"ID":"0a07157b-eade-4d9d-8d5d-277ed8ff407e","Type":"ContainerStarted","Data":"0da0ec17c4e5f7f9be78247f771ab685f6779d57962f755fb2c6e84a2f6c657d"} Jun 06 09:33:43 crc kubenswrapper[4911]: I0606 09:33:43.192522 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:43 crc kubenswrapper[4911]: I0606 09:33:43.195576 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9fkt8" event={"ID":"1f5dd978-318c-4d2d-88f4-7c4b01712832","Type":"ContainerStarted","Data":"a79aacaa20f80e37e3ba488e41ae9813c68900e09bd97e6ffe12c05aad5ebaaf"} Jun 06 09:33:43 crc kubenswrapper[4911]: I0606 09:33:43.198387 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"16811511-12af-4ab9-9a29-dcfed29b0f25","Type":"ContainerStarted","Data":"48d8f5e9fd0733a4d38dc6298d856923ae7216d3e74eca49deee698133510592"} Jun 06 09:33:43 crc kubenswrapper[4911]: I0606 09:33:43.216418 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" podStartSLOduration=4.216396816 podStartE2EDuration="4.216396816s" podCreationTimestamp="2025-06-06 09:33:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:33:43.212444714 +0000 UTC m=+1234.487870257" watchObservedRunningTime="2025-06-06 09:33:43.216396816 +0000 UTC m=+1234.491822359" Jun 06 09:33:43 crc kubenswrapper[4911]: I0606 09:33:43.247479 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.5968356630000002 podStartE2EDuration="4.247458017s" podCreationTimestamp="2025-06-06 09:33:39 +0000 UTC" firstStartedPulling="2025-06-06 09:33:40.871999277 +0000 UTC m=+1232.147424820" lastFinishedPulling="2025-06-06 09:33:42.522621621 +0000 UTC m=+1233.798047174" observedRunningTime="2025-06-06 09:33:43.229843802 +0000 UTC m=+1234.505269365" watchObservedRunningTime="2025-06-06 09:33:43.247458017 +0000 UTC m=+1234.522883570" Jun 06 09:33:43 crc kubenswrapper[4911]: I0606 09:33:43.358212 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jun 06 09:33:43 crc kubenswrapper[4911]: I0606 09:33:43.375217 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:44 crc kubenswrapper[4911]: I0606 09:33:44.211149 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9fkt8" event={"ID":"1f5dd978-318c-4d2d-88f4-7c4b01712832","Type":"ContainerStarted","Data":"08ee614aae402d43cee536e54185e90f0702e570df1f08c863aa74b598aafdb9"} Jun 06 09:33:44 crc kubenswrapper[4911]: I0606 09:33:44.254660 4911 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-9fkt8" podStartSLOduration=3.254626834 podStartE2EDuration="3.254626834s" podCreationTimestamp="2025-06-06 09:33:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:33:44.237309067 +0000 UTC m=+1235.512734630" watchObservedRunningTime="2025-06-06 09:33:44.254626834 +0000 UTC m=+1235.530052377" Jun 06 09:33:44 crc kubenswrapper[4911]: I0606 09:33:44.913513 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jun 06 09:33:46 crc kubenswrapper[4911]: I0606 09:33:46.229595 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"767fea6a-8b05-4ce5-a498-1b6fbc4aca86","Type":"ContainerStarted","Data":"411d0b79fc69a3e4de6134f8801ff98c6a123aa27dc73a8b79f433ab745b1705"} Jun 06 09:33:46 crc kubenswrapper[4911]: I0606 09:33:46.230104 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"767fea6a-8b05-4ce5-a498-1b6fbc4aca86","Type":"ContainerStarted","Data":"f2d7fda9784d762457a6116548a8bc38635efbc7544662e0202bf0bab5e6dadc"} Jun 06 09:33:46 crc kubenswrapper[4911]: I0606 09:33:46.233284 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3020b927-6fc7-446c-b89f-62a845e61aee","Type":"ContainerStarted","Data":"0de52fefbe9f330406ba86f1a81468eeba3161bcf408d2a5b2e6cce9b927db60"} Jun 06 09:33:46 crc kubenswrapper[4911]: I0606 09:33:46.233333 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3020b927-6fc7-446c-b89f-62a845e61aee","Type":"ContainerStarted","Data":"8a7fc1455bcc5b80c31b7b1be028a79f7dbea33dd47975b8410481c036b2bfd9"} Jun 06 09:33:46 crc kubenswrapper[4911]: I0606 09:33:46.233401 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3020b927-6fc7-446c-b89f-62a845e61aee" containerName="nova-metadata-log" containerID="cri-o://8a7fc1455bcc5b80c31b7b1be028a79f7dbea33dd47975b8410481c036b2bfd9" gracePeriod=30 Jun 06 09:33:46 crc kubenswrapper[4911]: I0606 09:33:46.233422 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3020b927-6fc7-446c-b89f-62a845e61aee" containerName="nova-metadata-metadata" containerID="cri-o://0de52fefbe9f330406ba86f1a81468eeba3161bcf408d2a5b2e6cce9b927db60" gracePeriod=30 Jun 06 09:33:46 crc kubenswrapper[4911]: I0606 09:33:46.235994 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"994636bb-4927-4649-9032-3b3c28eb6289","Type":"ContainerStarted","Data":"ba30b079d6e62e5132e1054023728456e28350b9f907f726afbf7cc907007e0d"} Jun 06 09:33:46 crc kubenswrapper[4911]: I0606 09:33:46.236071 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="994636bb-4927-4649-9032-3b3c28eb6289" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://ba30b079d6e62e5132e1054023728456e28350b9f907f726afbf7cc907007e0d" gracePeriod=30 Jun 06 09:33:46 crc kubenswrapper[4911]: I0606 09:33:46.266373 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.097079728 podStartE2EDuration="7.266352072s" podCreationTimestamp="2025-06-06 09:33:39 +0000 UTC" firstStartedPulling="2025-06-06 
09:33:41.708326978 +0000 UTC m=+1232.983752521" lastFinishedPulling="2025-06-06 09:33:44.877599322 +0000 UTC m=+1236.153024865" observedRunningTime="2025-06-06 09:33:46.252213697 +0000 UTC m=+1237.527639250" watchObservedRunningTime="2025-06-06 09:33:46.266352072 +0000 UTC m=+1237.541777615" Jun 06 09:33:46 crc kubenswrapper[4911]: I0606 09:33:46.278641 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=4.106000657 podStartE2EDuration="7.278618168s" podCreationTimestamp="2025-06-06 09:33:39 +0000 UTC" firstStartedPulling="2025-06-06 09:33:41.700012673 +0000 UTC m=+1232.975438216" lastFinishedPulling="2025-06-06 09:33:44.872630184 +0000 UTC m=+1236.148055727" observedRunningTime="2025-06-06 09:33:46.272013268 +0000 UTC m=+1237.547438811" watchObservedRunningTime="2025-06-06 09:33:46.278618168 +0000 UTC m=+1237.554043711" Jun 06 09:33:46 crc kubenswrapper[4911]: I0606 09:33:46.288827 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.115885163 podStartE2EDuration="7.288806471s" podCreationTimestamp="2025-06-06 09:33:39 +0000 UTC" firstStartedPulling="2025-06-06 09:33:41.699215993 +0000 UTC m=+1232.974641536" lastFinishedPulling="2025-06-06 09:33:44.872137291 +0000 UTC m=+1236.147562844" observedRunningTime="2025-06-06 09:33:46.287015675 +0000 UTC m=+1237.562441218" watchObservedRunningTime="2025-06-06 09:33:46.288806471 +0000 UTC m=+1237.564232014" Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.248211 4911 generic.go:334] "Generic (PLEG): container finished" podID="3020b927-6fc7-446c-b89f-62a845e61aee" containerID="0de52fefbe9f330406ba86f1a81468eeba3161bcf408d2a5b2e6cce9b927db60" exitCode=0 Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.249260 4911 generic.go:334] "Generic (PLEG): container finished" podID="3020b927-6fc7-446c-b89f-62a845e61aee" containerID="8a7fc1455bcc5b80c31b7b1be028a79f7dbea33dd47975b8410481c036b2bfd9" exitCode=143 Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.248302 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3020b927-6fc7-446c-b89f-62a845e61aee","Type":"ContainerDied","Data":"0de52fefbe9f330406ba86f1a81468eeba3161bcf408d2a5b2e6cce9b927db60"} Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.249435 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3020b927-6fc7-446c-b89f-62a845e61aee","Type":"ContainerDied","Data":"8a7fc1455bcc5b80c31b7b1be028a79f7dbea33dd47975b8410481c036b2bfd9"} Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.415902 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.539565 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-combined-ca-bundle\") pod \"3020b927-6fc7-446c-b89f-62a845e61aee\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.539959 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6fkk\" (UniqueName: \"kubernetes.io/projected/3020b927-6fc7-446c-b89f-62a845e61aee-kube-api-access-c6fkk\") pod \"3020b927-6fc7-446c-b89f-62a845e61aee\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.540135 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-config-data\") pod \"3020b927-6fc7-446c-b89f-62a845e61aee\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.540246 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3020b927-6fc7-446c-b89f-62a845e61aee-logs\") pod \"3020b927-6fc7-446c-b89f-62a845e61aee\" (UID: \"3020b927-6fc7-446c-b89f-62a845e61aee\") " Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.541074 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3020b927-6fc7-446c-b89f-62a845e61aee-logs" (OuterVolumeSpecName: "logs") pod "3020b927-6fc7-446c-b89f-62a845e61aee" (UID: "3020b927-6fc7-446c-b89f-62a845e61aee"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.545719 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3020b927-6fc7-446c-b89f-62a845e61aee-kube-api-access-c6fkk" (OuterVolumeSpecName: "kube-api-access-c6fkk") pod "3020b927-6fc7-446c-b89f-62a845e61aee" (UID: "3020b927-6fc7-446c-b89f-62a845e61aee"). InnerVolumeSpecName "kube-api-access-c6fkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.570057 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3020b927-6fc7-446c-b89f-62a845e61aee" (UID: "3020b927-6fc7-446c-b89f-62a845e61aee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.571008 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-config-data" (OuterVolumeSpecName: "config-data") pod "3020b927-6fc7-446c-b89f-62a845e61aee" (UID: "3020b927-6fc7-446c-b89f-62a845e61aee"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.642913 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.642958 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6fkk\" (UniqueName: \"kubernetes.io/projected/3020b927-6fc7-446c-b89f-62a845e61aee-kube-api-access-c6fkk\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.642973 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3020b927-6fc7-446c-b89f-62a845e61aee-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:47 crc kubenswrapper[4911]: I0606 09:33:47.642984 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3020b927-6fc7-446c-b89f-62a845e61aee-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.260034 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3020b927-6fc7-446c-b89f-62a845e61aee","Type":"ContainerDied","Data":"b2e10c95c0ae3ab18e9673dd34bcd710274321e4d0434a38d59c568be64a60c6"} Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.260109 4911 scope.go:117] "RemoveContainer" containerID="0de52fefbe9f330406ba86f1a81468eeba3161bcf408d2a5b2e6cce9b927db60" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.261491 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.286422 4911 scope.go:117] "RemoveContainer" containerID="8a7fc1455bcc5b80c31b7b1be028a79f7dbea33dd47975b8410481c036b2bfd9" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.293826 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.320495 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.339236 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:48 crc kubenswrapper[4911]: E0606 09:33:48.339749 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3020b927-6fc7-446c-b89f-62a845e61aee" containerName="nova-metadata-log" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.339772 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3020b927-6fc7-446c-b89f-62a845e61aee" containerName="nova-metadata-log" Jun 06 09:33:48 crc kubenswrapper[4911]: E0606 09:33:48.339793 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3020b927-6fc7-446c-b89f-62a845e61aee" containerName="nova-metadata-metadata" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.339800 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3020b927-6fc7-446c-b89f-62a845e61aee" containerName="nova-metadata-metadata" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.340026 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3020b927-6fc7-446c-b89f-62a845e61aee" containerName="nova-metadata-log" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.340040 4911 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3020b927-6fc7-446c-b89f-62a845e61aee" containerName="nova-metadata-metadata" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.341077 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.343810 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.344041 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.366679 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.458416 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.458758 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4ab6492-4428-4a47-8240-b2edcae69fce-logs\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.458793 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2z7d\" (UniqueName: \"kubernetes.io/projected/b4ab6492-4428-4a47-8240-b2edcae69fce-kube-api-access-g2z7d\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.458947 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-config-data\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.458973 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.560338 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4ab6492-4428-4a47-8240-b2edcae69fce-logs\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.560408 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2z7d\" (UniqueName: \"kubernetes.io/projected/b4ab6492-4428-4a47-8240-b2edcae69fce-kube-api-access-g2z7d\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.560565 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-config-data\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.560602 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.560657 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.560981 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4ab6492-4428-4a47-8240-b2edcae69fce-logs\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.566706 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.569934 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-config-data\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.576165 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.582776 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2z7d\" (UniqueName: \"kubernetes.io/projected/b4ab6492-4428-4a47-8240-b2edcae69fce-kube-api-access-g2z7d\") pod \"nova-metadata-0\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " pod="openstack/nova-metadata-0" Jun 06 09:33:48 crc kubenswrapper[4911]: I0606 09:33:48.669471 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:33:49 crc kubenswrapper[4911]: I0606 09:33:49.198882 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:49 crc kubenswrapper[4911]: W0606 09:33:49.200816 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4ab6492_4428_4a47_8240_b2edcae69fce.slice/crio-c6b4c970c22f3b3ad8cc62f281463bfd84253920506514837d883dd4a267c3a0 WatchSource:0}: Error finding container c6b4c970c22f3b3ad8cc62f281463bfd84253920506514837d883dd4a267c3a0: Status 404 returned error can't find the container with id c6b4c970c22f3b3ad8cc62f281463bfd84253920506514837d883dd4a267c3a0 Jun 06 09:33:49 crc kubenswrapper[4911]: I0606 09:33:49.269664 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4ab6492-4428-4a47-8240-b2edcae69fce","Type":"ContainerStarted","Data":"c6b4c970c22f3b3ad8cc62f281463bfd84253920506514837d883dd4a267c3a0"} Jun 06 09:33:49 crc kubenswrapper[4911]: I0606 09:33:49.286440 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jun 06 09:33:49 crc kubenswrapper[4911]: I0606 09:33:49.913537 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jun 06 09:33:49 crc kubenswrapper[4911]: I0606 09:33:49.943443 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jun 06 09:33:49 crc kubenswrapper[4911]: I0606 09:33:49.970165 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3020b927-6fc7-446c-b89f-62a845e61aee" path="/var/lib/kubelet/pods/3020b927-6fc7-446c-b89f-62a845e61aee/volumes" Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.000081 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.000154 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.277112 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.283455 4911 generic.go:334] "Generic (PLEG): container finished" podID="ca5e53cd-23ba-4460-80b0-4c2cca13773e" containerID="90455f720a73953e46daa20e61e5cd68ecc7ddf301a441a4b34a908bfdd660ee" exitCode=0 Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.283517 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hbh54" event={"ID":"ca5e53cd-23ba-4460-80b0-4c2cca13773e","Type":"ContainerDied","Data":"90455f720a73953e46daa20e61e5cd68ecc7ddf301a441a4b34a908bfdd660ee"} Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.286914 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4ab6492-4428-4a47-8240-b2edcae69fce","Type":"ContainerStarted","Data":"d0d3b69b58acc331303d9b11dd53dbfa9367869ec77daac3a61b249f9cf41e1d"} Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.286986 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4ab6492-4428-4a47-8240-b2edcae69fce","Type":"ContainerStarted","Data":"f1bec1b683ce149ef16ae3261107ac1d71bdaf45d1eeab27cf33adf0b8e3b5ac"} Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.287838 4911 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.333161 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.448312 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.448275425 podStartE2EDuration="2.448275425s" podCreationTimestamp="2025-06-06 09:33:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:33:50.440264088 +0000 UTC m=+1241.715689631" watchObservedRunningTime="2025-06-06 09:33:50.448275425 +0000 UTC m=+1241.723700968" Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.474722 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b957c86d9-z2kmd"] Jun 06 09:33:50 crc kubenswrapper[4911]: I0606 09:33:50.474992 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" podUID="522d16af-0c42-407e-8a26-115443d6e0fa" containerName="dnsmasq-dns" containerID="cri-o://b7d533553b87a1be2a974258c757c73bc1f2f166ced6c21ce80c2cf802d17c37" gracePeriod=10 Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.081298 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.081387 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.302779 4911 generic.go:334] "Generic (PLEG): container finished" podID="1f5dd978-318c-4d2d-88f4-7c4b01712832" containerID="08ee614aae402d43cee536e54185e90f0702e570df1f08c863aa74b598aafdb9" exitCode=0 Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.302861 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9fkt8" event={"ID":"1f5dd978-318c-4d2d-88f4-7c4b01712832","Type":"ContainerDied","Data":"08ee614aae402d43cee536e54185e90f0702e570df1f08c863aa74b598aafdb9"} Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.308270 4911 generic.go:334] "Generic (PLEG): container finished" podID="522d16af-0c42-407e-8a26-115443d6e0fa" containerID="b7d533553b87a1be2a974258c757c73bc1f2f166ced6c21ce80c2cf802d17c37" exitCode=0 Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.308520 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" event={"ID":"522d16af-0c42-407e-8a26-115443d6e0fa","Type":"ContainerDied","Data":"b7d533553b87a1be2a974258c757c73bc1f2f166ced6c21ce80c2cf802d17c37"} Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.776655 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.837797 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-config\") pod \"522d16af-0c42-407e-8a26-115443d6e0fa\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.837940 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-svc\") pod \"522d16af-0c42-407e-8a26-115443d6e0fa\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.838001 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-swift-storage-0\") pod \"522d16af-0c42-407e-8a26-115443d6e0fa\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.838066 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-nb\") pod \"522d16af-0c42-407e-8a26-115443d6e0fa\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.838151 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2w9n\" (UniqueName: \"kubernetes.io/projected/522d16af-0c42-407e-8a26-115443d6e0fa-kube-api-access-q2w9n\") pod \"522d16af-0c42-407e-8a26-115443d6e0fa\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.838252 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-sb\") pod \"522d16af-0c42-407e-8a26-115443d6e0fa\" (UID: \"522d16af-0c42-407e-8a26-115443d6e0fa\") " Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.848669 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/522d16af-0c42-407e-8a26-115443d6e0fa-kube-api-access-q2w9n" (OuterVolumeSpecName: "kube-api-access-q2w9n") pod "522d16af-0c42-407e-8a26-115443d6e0fa" (UID: "522d16af-0c42-407e-8a26-115443d6e0fa"). InnerVolumeSpecName "kube-api-access-q2w9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.939928 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-config" (OuterVolumeSpecName: "config") pod "522d16af-0c42-407e-8a26-115443d6e0fa" (UID: "522d16af-0c42-407e-8a26-115443d6e0fa"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.940428 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.940464 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2w9n\" (UniqueName: \"kubernetes.io/projected/522d16af-0c42-407e-8a26-115443d6e0fa-kube-api-access-q2w9n\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.945394 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "522d16af-0c42-407e-8a26-115443d6e0fa" (UID: "522d16af-0c42-407e-8a26-115443d6e0fa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.965141 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "522d16af-0c42-407e-8a26-115443d6e0fa" (UID: "522d16af-0c42-407e-8a26-115443d6e0fa"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.981539 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "522d16af-0c42-407e-8a26-115443d6e0fa" (UID: "522d16af-0c42-407e-8a26-115443d6e0fa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:33:51 crc kubenswrapper[4911]: I0606 09:33:51.991752 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "522d16af-0c42-407e-8a26-115443d6e0fa" (UID: "522d16af-0c42-407e-8a26-115443d6e0fa"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.042479 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.042523 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.042537 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.042550 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/522d16af-0c42-407e-8a26-115443d6e0fa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.321890 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" event={"ID":"522d16af-0c42-407e-8a26-115443d6e0fa","Type":"ContainerDied","Data":"6446a915015b1790f3521e496b77c961a6416ce8e8c9b525998eb979db6f8437"} Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.321991 4911 scope.go:117] "RemoveContainer" containerID="b7d533553b87a1be2a974258c757c73bc1f2f166ced6c21ce80c2cf802d17c37" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.321937 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.384042 4911 scope.go:117] "RemoveContainer" containerID="c7627ce5d4a1ded581f26985bd0106a03e7c83defaff24ddcfbb7ecb2c2ffbc9" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.384213 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b957c86d9-z2kmd"] Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.396616 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b957c86d9-z2kmd"] Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.411254 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.449716 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-scripts\") pod \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.449773 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64g4g\" (UniqueName: \"kubernetes.io/projected/ca5e53cd-23ba-4460-80b0-4c2cca13773e-kube-api-access-64g4g\") pod \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.449815 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-combined-ca-bundle\") pod \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.449929 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-config-data\") pod \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\" (UID: \"ca5e53cd-23ba-4460-80b0-4c2cca13773e\") " Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.456894 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-scripts" (OuterVolumeSpecName: "scripts") pod "ca5e53cd-23ba-4460-80b0-4c2cca13773e" (UID: "ca5e53cd-23ba-4460-80b0-4c2cca13773e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.457258 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca5e53cd-23ba-4460-80b0-4c2cca13773e-kube-api-access-64g4g" (OuterVolumeSpecName: "kube-api-access-64g4g") pod "ca5e53cd-23ba-4460-80b0-4c2cca13773e" (UID: "ca5e53cd-23ba-4460-80b0-4c2cca13773e"). InnerVolumeSpecName "kube-api-access-64g4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.485595 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-config-data" (OuterVolumeSpecName: "config-data") pod "ca5e53cd-23ba-4460-80b0-4c2cca13773e" (UID: "ca5e53cd-23ba-4460-80b0-4c2cca13773e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.488059 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca5e53cd-23ba-4460-80b0-4c2cca13773e" (UID: "ca5e53cd-23ba-4460-80b0-4c2cca13773e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.552170 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.552205 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64g4g\" (UniqueName: \"kubernetes.io/projected/ca5e53cd-23ba-4460-80b0-4c2cca13773e-kube-api-access-64g4g\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.552221 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:52 crc kubenswrapper[4911]: I0606 09:33:52.552232 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca5e53cd-23ba-4460-80b0-4c2cca13773e-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.309582 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.338798 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-9fkt8" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.338780 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-9fkt8" event={"ID":"1f5dd978-318c-4d2d-88f4-7c4b01712832","Type":"ContainerDied","Data":"a79aacaa20f80e37e3ba488e41ae9813c68900e09bd97e6ffe12c05aad5ebaaf"} Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.339027 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a79aacaa20f80e37e3ba488e41ae9813c68900e09bd97e6ffe12c05aad5ebaaf" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.342344 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hbh54" event={"ID":"ca5e53cd-23ba-4460-80b0-4c2cca13773e","Type":"ContainerDied","Data":"162fa8275da1373a654fdb38555f99c569ede84dacee3ca6aea7cf39806e9abc"} Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.342391 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="162fa8275da1373a654fdb38555f99c569ede84dacee3ca6aea7cf39806e9abc" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.342422 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hbh54" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.368283 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-combined-ca-bundle\") pod \"1f5dd978-318c-4d2d-88f4-7c4b01712832\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.368411 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-config-data\") pod \"1f5dd978-318c-4d2d-88f4-7c4b01712832\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.368452 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59xw7\" (UniqueName: \"kubernetes.io/projected/1f5dd978-318c-4d2d-88f4-7c4b01712832-kube-api-access-59xw7\") pod \"1f5dd978-318c-4d2d-88f4-7c4b01712832\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.368485 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-scripts\") pod \"1f5dd978-318c-4d2d-88f4-7c4b01712832\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.383834 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f5dd978-318c-4d2d-88f4-7c4b01712832-kube-api-access-59xw7" (OuterVolumeSpecName: "kube-api-access-59xw7") pod "1f5dd978-318c-4d2d-88f4-7c4b01712832" (UID: "1f5dd978-318c-4d2d-88f4-7c4b01712832"). InnerVolumeSpecName "kube-api-access-59xw7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.416370 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-scripts" (OuterVolumeSpecName: "scripts") pod "1f5dd978-318c-4d2d-88f4-7c4b01712832" (UID: "1f5dd978-318c-4d2d-88f4-7c4b01712832"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.422599 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jun 06 09:33:53 crc kubenswrapper[4911]: E0606 09:33:53.423083 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca5e53cd-23ba-4460-80b0-4c2cca13773e" containerName="nova-manage" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.423208 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca5e53cd-23ba-4460-80b0-4c2cca13773e" containerName="nova-manage" Jun 06 09:33:53 crc kubenswrapper[4911]: E0606 09:33:53.423235 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="522d16af-0c42-407e-8a26-115443d6e0fa" containerName="init" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.423244 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="522d16af-0c42-407e-8a26-115443d6e0fa" containerName="init" Jun 06 09:33:53 crc kubenswrapper[4911]: E0606 09:33:53.423285 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f5dd978-318c-4d2d-88f4-7c4b01712832" containerName="nova-cell1-conductor-db-sync" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.423294 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f5dd978-318c-4d2d-88f4-7c4b01712832" containerName="nova-cell1-conductor-db-sync" Jun 06 09:33:53 crc kubenswrapper[4911]: E0606 09:33:53.423308 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="522d16af-0c42-407e-8a26-115443d6e0fa" containerName="dnsmasq-dns" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.423314 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="522d16af-0c42-407e-8a26-115443d6e0fa" containerName="dnsmasq-dns" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.423496 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca5e53cd-23ba-4460-80b0-4c2cca13773e" containerName="nova-manage" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.423524 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f5dd978-318c-4d2d-88f4-7c4b01712832" containerName="nova-cell1-conductor-db-sync" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.423536 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="522d16af-0c42-407e-8a26-115443d6e0fa" containerName="dnsmasq-dns" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.424500 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:53 crc kubenswrapper[4911]: E0606 09:33:53.427332 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-config-data podName:1f5dd978-318c-4d2d-88f4-7c4b01712832 nodeName:}" failed. No retries permitted until 2025-06-06 09:33:53.927303312 +0000 UTC m=+1245.202728855 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-config-data") pod "1f5dd978-318c-4d2d-88f4-7c4b01712832" (UID: "1f5dd978-318c-4d2d-88f4-7c4b01712832") : error deleting /var/lib/kubelet/pods/1f5dd978-318c-4d2d-88f4-7c4b01712832/volume-subpaths: remove /var/lib/kubelet/pods/1f5dd978-318c-4d2d-88f4-7c4b01712832/volume-subpaths: no such file or directory Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.433232 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.440350 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1f5dd978-318c-4d2d-88f4-7c4b01712832" (UID: "1f5dd978-318c-4d2d-88f4-7c4b01712832"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.472394 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e14edf-bb25-4e67-87f2-d2a6e7f90b89-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"e8e14edf-bb25-4e67-87f2-d2a6e7f90b89\") " pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.472546 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb68x\" (UniqueName: \"kubernetes.io/projected/e8e14edf-bb25-4e67-87f2-d2a6e7f90b89-kube-api-access-lb68x\") pod \"nova-cell1-conductor-0\" (UID: \"e8e14edf-bb25-4e67-87f2-d2a6e7f90b89\") " pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.472583 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8e14edf-bb25-4e67-87f2-d2a6e7f90b89-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"e8e14edf-bb25-4e67-87f2-d2a6e7f90b89\") " pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.472731 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59xw7\" (UniqueName: \"kubernetes.io/projected/1f5dd978-318c-4d2d-88f4-7c4b01712832-kube-api-access-59xw7\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.472743 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.472754 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.574766 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e14edf-bb25-4e67-87f2-d2a6e7f90b89-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"e8e14edf-bb25-4e67-87f2-d2a6e7f90b89\") " pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.574955 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-lb68x\" (UniqueName: \"kubernetes.io/projected/e8e14edf-bb25-4e67-87f2-d2a6e7f90b89-kube-api-access-lb68x\") pod \"nova-cell1-conductor-0\" (UID: \"e8e14edf-bb25-4e67-87f2-d2a6e7f90b89\") " pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.574994 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8e14edf-bb25-4e67-87f2-d2a6e7f90b89-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"e8e14edf-bb25-4e67-87f2-d2a6e7f90b89\") " pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.582422 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8e14edf-bb25-4e67-87f2-d2a6e7f90b89-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"e8e14edf-bb25-4e67-87f2-d2a6e7f90b89\") " pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.588356 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e14edf-bb25-4e67-87f2-d2a6e7f90b89-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"e8e14edf-bb25-4e67-87f2-d2a6e7f90b89\") " pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.596972 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb68x\" (UniqueName: \"kubernetes.io/projected/e8e14edf-bb25-4e67-87f2-d2a6e7f90b89-kube-api-access-lb68x\") pod \"nova-cell1-conductor-0\" (UID: \"e8e14edf-bb25-4e67-87f2-d2a6e7f90b89\") " pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.601552 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.601797 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerName="nova-api-log" containerID="cri-o://f2d7fda9784d762457a6116548a8bc38635efbc7544662e0202bf0bab5e6dadc" gracePeriod=30 Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.602323 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerName="nova-api-api" containerID="cri-o://411d0b79fc69a3e4de6134f8801ff98c6a123aa27dc73a8b79f433ab745b1705" gracePeriod=30 Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.615114 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.615341 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="16811511-12af-4ab9-9a29-dcfed29b0f25" containerName="nova-scheduler-scheduler" containerID="cri-o://48d8f5e9fd0733a4d38dc6298d856923ae7216d3e74eca49deee698133510592" gracePeriod=30 Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.624886 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.625402 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b4ab6492-4428-4a47-8240-b2edcae69fce" containerName="nova-metadata-log" 
containerID="cri-o://f1bec1b683ce149ef16ae3261107ac1d71bdaf45d1eeab27cf33adf0b8e3b5ac" gracePeriod=30 Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.625704 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b4ab6492-4428-4a47-8240-b2edcae69fce" containerName="nova-metadata-metadata" containerID="cri-o://d0d3b69b58acc331303d9b11dd53dbfa9367869ec77daac3a61b249f9cf41e1d" gracePeriod=30 Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.669670 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.669731 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.814192 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.961870 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="522d16af-0c42-407e-8a26-115443d6e0fa" path="/var/lib/kubelet/pods/522d16af-0c42-407e-8a26-115443d6e0fa/volumes" Jun 06 09:33:53 crc kubenswrapper[4911]: I0606 09:33:53.994046 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-config-data\") pod \"1f5dd978-318c-4d2d-88f4-7c4b01712832\" (UID: \"1f5dd978-318c-4d2d-88f4-7c4b01712832\") " Jun 06 09:33:54 crc kubenswrapper[4911]: I0606 09:33:54.000477 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-config-data" (OuterVolumeSpecName: "config-data") pod "1f5dd978-318c-4d2d-88f4-7c4b01712832" (UID: "1f5dd978-318c-4d2d-88f4-7c4b01712832"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:54 crc kubenswrapper[4911]: I0606 09:33:54.171073 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f5dd978-318c-4d2d-88f4-7c4b01712832-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:54 crc kubenswrapper[4911]: I0606 09:33:54.356042 4911 generic.go:334] "Generic (PLEG): container finished" podID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerID="f2d7fda9784d762457a6116548a8bc38635efbc7544662e0202bf0bab5e6dadc" exitCode=143 Jun 06 09:33:54 crc kubenswrapper[4911]: I0606 09:33:54.356898 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"767fea6a-8b05-4ce5-a498-1b6fbc4aca86","Type":"ContainerDied","Data":"f2d7fda9784d762457a6116548a8bc38635efbc7544662e0202bf0bab5e6dadc"} Jun 06 09:33:54 crc kubenswrapper[4911]: I0606 09:33:54.360115 4911 generic.go:334] "Generic (PLEG): container finished" podID="b4ab6492-4428-4a47-8240-b2edcae69fce" containerID="d0d3b69b58acc331303d9b11dd53dbfa9367869ec77daac3a61b249f9cf41e1d" exitCode=0 Jun 06 09:33:54 crc kubenswrapper[4911]: I0606 09:33:54.360154 4911 generic.go:334] "Generic (PLEG): container finished" podID="b4ab6492-4428-4a47-8240-b2edcae69fce" containerID="f1bec1b683ce149ef16ae3261107ac1d71bdaf45d1eeab27cf33adf0b8e3b5ac" exitCode=143 Jun 06 09:33:54 crc kubenswrapper[4911]: I0606 09:33:54.360178 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4ab6492-4428-4a47-8240-b2edcae69fce","Type":"ContainerDied","Data":"d0d3b69b58acc331303d9b11dd53dbfa9367869ec77daac3a61b249f9cf41e1d"} Jun 06 09:33:54 crc kubenswrapper[4911]: I0606 09:33:54.360205 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4ab6492-4428-4a47-8240-b2edcae69fce","Type":"ContainerDied","Data":"f1bec1b683ce149ef16ae3261107ac1d71bdaf45d1eeab27cf33adf0b8e3b5ac"} Jun 06 09:33:54 crc kubenswrapper[4911]: I0606 09:33:54.755323 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jun 06 09:33:54 crc kubenswrapper[4911]: E0606 09:33:54.915945 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="48d8f5e9fd0733a4d38dc6298d856923ae7216d3e74eca49deee698133510592" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jun 06 09:33:54 crc kubenswrapper[4911]: E0606 09:33:54.917249 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="48d8f5e9fd0733a4d38dc6298d856923ae7216d3e74eca49deee698133510592" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jun 06 09:33:54 crc kubenswrapper[4911]: E0606 09:33:54.918964 4911 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="48d8f5e9fd0733a4d38dc6298d856923ae7216d3e74eca49deee698133510592" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jun 06 09:33:54 crc kubenswrapper[4911]: E0606 09:33:54.919034 4911 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="16811511-12af-4ab9-9a29-dcfed29b0f25" containerName="nova-scheduler-scheduler" Jun 06 09:33:54 crc kubenswrapper[4911]: I0606 09:33:54.937896 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.091063 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2z7d\" (UniqueName: \"kubernetes.io/projected/b4ab6492-4428-4a47-8240-b2edcae69fce-kube-api-access-g2z7d\") pod \"b4ab6492-4428-4a47-8240-b2edcae69fce\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.091169 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4ab6492-4428-4a47-8240-b2edcae69fce-logs\") pod \"b4ab6492-4428-4a47-8240-b2edcae69fce\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.091198 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-config-data\") pod \"b4ab6492-4428-4a47-8240-b2edcae69fce\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.091274 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-nova-metadata-tls-certs\") pod \"b4ab6492-4428-4a47-8240-b2edcae69fce\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.091307 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-combined-ca-bundle\") pod \"b4ab6492-4428-4a47-8240-b2edcae69fce\" (UID: \"b4ab6492-4428-4a47-8240-b2edcae69fce\") " Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.092139 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4ab6492-4428-4a47-8240-b2edcae69fce-logs" (OuterVolumeSpecName: "logs") pod "b4ab6492-4428-4a47-8240-b2edcae69fce" (UID: "b4ab6492-4428-4a47-8240-b2edcae69fce"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.107190 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4ab6492-4428-4a47-8240-b2edcae69fce-kube-api-access-g2z7d" (OuterVolumeSpecName: "kube-api-access-g2z7d") pod "b4ab6492-4428-4a47-8240-b2edcae69fce" (UID: "b4ab6492-4428-4a47-8240-b2edcae69fce"). InnerVolumeSpecName "kube-api-access-g2z7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.134582 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-config-data" (OuterVolumeSpecName: "config-data") pod "b4ab6492-4428-4a47-8240-b2edcae69fce" (UID: "b4ab6492-4428-4a47-8240-b2edcae69fce"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.143175 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "b4ab6492-4428-4a47-8240-b2edcae69fce" (UID: "b4ab6492-4428-4a47-8240-b2edcae69fce"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.148310 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4ab6492-4428-4a47-8240-b2edcae69fce" (UID: "b4ab6492-4428-4a47-8240-b2edcae69fce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.195241 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.195282 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2z7d\" (UniqueName: \"kubernetes.io/projected/b4ab6492-4428-4a47-8240-b2edcae69fce-kube-api-access-g2z7d\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.195295 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4ab6492-4428-4a47-8240-b2edcae69fce-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.195303 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.195311 4911 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4ab6492-4428-4a47-8240-b2edcae69fce-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.449910 4911 generic.go:334] "Generic (PLEG): container finished" podID="16811511-12af-4ab9-9a29-dcfed29b0f25" containerID="48d8f5e9fd0733a4d38dc6298d856923ae7216d3e74eca49deee698133510592" exitCode=0 Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.449950 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"16811511-12af-4ab9-9a29-dcfed29b0f25","Type":"ContainerDied","Data":"48d8f5e9fd0733a4d38dc6298d856923ae7216d3e74eca49deee698133510592"} Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.452279 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"e8e14edf-bb25-4e67-87f2-d2a6e7f90b89","Type":"ContainerStarted","Data":"ecac32619747b62355952f9867c6a87d75bc20f25b012642952f453e55d0a22a"} Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.452322 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"e8e14edf-bb25-4e67-87f2-d2a6e7f90b89","Type":"ContainerStarted","Data":"785a0d721f1fec579c260a03edc1c61553e8b18675b6a41f8180049b49aaf2cc"} Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.452478 4911 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.454861 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4ab6492-4428-4a47-8240-b2edcae69fce","Type":"ContainerDied","Data":"c6b4c970c22f3b3ad8cc62f281463bfd84253920506514837d883dd4a267c3a0"} Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.454907 4911 scope.go:117] "RemoveContainer" containerID="d0d3b69b58acc331303d9b11dd53dbfa9367869ec77daac3a61b249f9cf41e1d" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.454938 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.475734 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.475712405 podStartE2EDuration="2.475712405s" podCreationTimestamp="2025-06-06 09:33:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:33:55.470389448 +0000 UTC m=+1246.745814991" watchObservedRunningTime="2025-06-06 09:33:55.475712405 +0000 UTC m=+1246.751137948" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.509057 4911 scope.go:117] "RemoveContainer" containerID="f1bec1b683ce149ef16ae3261107ac1d71bdaf45d1eeab27cf33adf0b8e3b5ac" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.517276 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.542918 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.560532 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:55 crc kubenswrapper[4911]: E0606 09:33:55.561378 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4ab6492-4428-4a47-8240-b2edcae69fce" containerName="nova-metadata-log" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.561399 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4ab6492-4428-4a47-8240-b2edcae69fce" containerName="nova-metadata-log" Jun 06 09:33:55 crc kubenswrapper[4911]: E0606 09:33:55.561445 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4ab6492-4428-4a47-8240-b2edcae69fce" containerName="nova-metadata-metadata" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.561456 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4ab6492-4428-4a47-8240-b2edcae69fce" containerName="nova-metadata-metadata" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.561696 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4ab6492-4428-4a47-8240-b2edcae69fce" containerName="nova-metadata-metadata" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.561727 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4ab6492-4428-4a47-8240-b2edcae69fce" containerName="nova-metadata-log" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.563312 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.569183 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.594021 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.600302 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.703989 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.704066 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phc9g\" (UniqueName: \"kubernetes.io/projected/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-kube-api-access-phc9g\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.704127 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.704203 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-config-data\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.704246 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-logs\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.805895 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.805980 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.806004 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phc9g\" (UniqueName: \"kubernetes.io/projected/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-kube-api-access-phc9g\") pod \"nova-metadata-0\" (UID: 
\"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.806062 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-config-data\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.806118 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-logs\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.806607 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-logs\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.813913 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-config-data\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.817879 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.818039 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.823812 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phc9g\" (UniqueName: \"kubernetes.io/projected/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-kube-api-access-phc9g\") pod \"nova-metadata-0\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.886263 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:33:55 crc kubenswrapper[4911]: I0606 09:33:55.969698 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4ab6492-4428-4a47-8240-b2edcae69fce" path="/var/lib/kubelet/pods/b4ab6492-4428-4a47-8240-b2edcae69fce/volumes" Jun 06 09:33:56 crc kubenswrapper[4911]: W0606 09:33:56.419555 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc47e2eda_3e8b_4a88_af7a_c0ea0e27b3d6.slice/crio-31efb5722b2557153201180dc65cee56fb4957fd1580b254bc4497847c8eb88c WatchSource:0}: Error finding container 31efb5722b2557153201180dc65cee56fb4957fd1580b254bc4497847c8eb88c: Status 404 returned error can't find the container with id 31efb5722b2557153201180dc65cee56fb4957fd1580b254bc4497847c8eb88c Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.421951 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.472175 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"16811511-12af-4ab9-9a29-dcfed29b0f25","Type":"ContainerDied","Data":"63f4ba02671ec0d1580b577c53c04595e95770b779455957f847f75ca4026d40"} Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.472229 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63f4ba02671ec0d1580b577c53c04595e95770b779455957f847f75ca4026d40" Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.472425 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.476118 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6","Type":"ContainerStarted","Data":"31efb5722b2557153201180dc65cee56fb4957fd1580b254bc4497847c8eb88c"} Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.729835 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6b957c86d9-z2kmd" podUID="522d16af-0c42-407e-8a26-115443d6e0fa" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.168:5353: i/o timeout" Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.745047 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-config-data\") pod \"16811511-12af-4ab9-9a29-dcfed29b0f25\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.745600 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46hlc\" (UniqueName: \"kubernetes.io/projected/16811511-12af-4ab9-9a29-dcfed29b0f25-kube-api-access-46hlc\") pod \"16811511-12af-4ab9-9a29-dcfed29b0f25\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.745724 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-combined-ca-bundle\") pod \"16811511-12af-4ab9-9a29-dcfed29b0f25\" (UID: \"16811511-12af-4ab9-9a29-dcfed29b0f25\") " Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.750966 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/16811511-12af-4ab9-9a29-dcfed29b0f25-kube-api-access-46hlc" (OuterVolumeSpecName: "kube-api-access-46hlc") pod "16811511-12af-4ab9-9a29-dcfed29b0f25" (UID: "16811511-12af-4ab9-9a29-dcfed29b0f25"). InnerVolumeSpecName "kube-api-access-46hlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.780518 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-config-data" (OuterVolumeSpecName: "config-data") pod "16811511-12af-4ab9-9a29-dcfed29b0f25" (UID: "16811511-12af-4ab9-9a29-dcfed29b0f25"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.785265 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16811511-12af-4ab9-9a29-dcfed29b0f25" (UID: "16811511-12af-4ab9-9a29-dcfed29b0f25"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.848199 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.848243 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46hlc\" (UniqueName: \"kubernetes.io/projected/16811511-12af-4ab9-9a29-dcfed29b0f25-kube-api-access-46hlc\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:56 crc kubenswrapper[4911]: I0606 09:33:56.848256 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16811511-12af-4ab9-9a29-dcfed29b0f25-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.491610 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6","Type":"ContainerStarted","Data":"c8b536c9125d17e82a55ab08857cfcb8cd6e6fd8855ab8bf66c43a903e9f4108"} Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.491942 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6","Type":"ContainerStarted","Data":"95f5747a9a073359a385c0831806e5f4fdebe51a44bb017f01fdef4c3490f928"} Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.494425 4911 generic.go:334] "Generic (PLEG): container finished" podID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerID="411d0b79fc69a3e4de6134f8801ff98c6a123aa27dc73a8b79f433ab745b1705" exitCode=0 Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.494549 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.494598 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"767fea6a-8b05-4ce5-a498-1b6fbc4aca86","Type":"ContainerDied","Data":"411d0b79fc69a3e4de6134f8801ff98c6a123aa27dc73a8b79f433ab745b1705"} Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.516645 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.516619286 podStartE2EDuration="2.516619286s" podCreationTimestamp="2025-06-06 09:33:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:33:57.510337194 +0000 UTC m=+1248.785762747" watchObservedRunningTime="2025-06-06 09:33:57.516619286 +0000 UTC m=+1248.792044829" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.550740 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.567747 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.579164 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:33:57 crc kubenswrapper[4911]: E0606 09:33:57.579684 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16811511-12af-4ab9-9a29-dcfed29b0f25" containerName="nova-scheduler-scheduler" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.579784 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="16811511-12af-4ab9-9a29-dcfed29b0f25" containerName="nova-scheduler-scheduler" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.579979 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="16811511-12af-4ab9-9a29-dcfed29b0f25" containerName="nova-scheduler-scheduler" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.580800 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.583032 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.590737 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.668741 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-config-data\") pod \"nova-scheduler-0\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.668821 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4vtb\" (UniqueName: \"kubernetes.io/projected/16c16194-2433-4e69-b40e-4a1f3d43795e-kube-api-access-d4vtb\") pod \"nova-scheduler-0\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.669104 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.752622 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.770729 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.771834 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-config-data\") pod \"nova-scheduler-0\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.771886 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4vtb\" (UniqueName: \"kubernetes.io/projected/16c16194-2433-4e69-b40e-4a1f3d43795e-kube-api-access-d4vtb\") pod \"nova-scheduler-0\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.777894 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.780069 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-config-data\") pod \"nova-scheduler-0\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc 
kubenswrapper[4911]: I0606 09:33:57.796985 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4vtb\" (UniqueName: \"kubernetes.io/projected/16c16194-2433-4e69-b40e-4a1f3d43795e-kube-api-access-d4vtb\") pod \"nova-scheduler-0\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.875082 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-logs\") pod \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.875158 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5l4c\" (UniqueName: \"kubernetes.io/projected/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-kube-api-access-p5l4c\") pod \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.875225 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-combined-ca-bundle\") pod \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.875396 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-config-data\") pod \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\" (UID: \"767fea6a-8b05-4ce5-a498-1b6fbc4aca86\") " Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.875718 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-logs" (OuterVolumeSpecName: "logs") pod "767fea6a-8b05-4ce5-a498-1b6fbc4aca86" (UID: "767fea6a-8b05-4ce5-a498-1b6fbc4aca86"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.876541 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.882403 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-kube-api-access-p5l4c" (OuterVolumeSpecName: "kube-api-access-p5l4c") pod "767fea6a-8b05-4ce5-a498-1b6fbc4aca86" (UID: "767fea6a-8b05-4ce5-a498-1b6fbc4aca86"). InnerVolumeSpecName "kube-api-access-p5l4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.902038 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.902485 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "767fea6a-8b05-4ce5-a498-1b6fbc4aca86" (UID: "767fea6a-8b05-4ce5-a498-1b6fbc4aca86"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.917704 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-config-data" (OuterVolumeSpecName: "config-data") pod "767fea6a-8b05-4ce5-a498-1b6fbc4aca86" (UID: "767fea6a-8b05-4ce5-a498-1b6fbc4aca86"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.966616 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16811511-12af-4ab9-9a29-dcfed29b0f25" path="/var/lib/kubelet/pods/16811511-12af-4ab9-9a29-dcfed29b0f25/volumes" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.979064 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5l4c\" (UniqueName: \"kubernetes.io/projected/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-kube-api-access-p5l4c\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.979130 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:57 crc kubenswrapper[4911]: I0606 09:33:57.979144 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/767fea6a-8b05-4ce5-a498-1b6fbc4aca86-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:33:58 crc kubenswrapper[4911]: W0606 09:33:58.353772 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16c16194_2433_4e69_b40e_4a1f3d43795e.slice/crio-6f9ed28323d7241bc758509049f249f183f3bf9bbe20afd4a6fee18cba5fe304 WatchSource:0}: Error finding container 6f9ed28323d7241bc758509049f249f183f3bf9bbe20afd4a6fee18cba5fe304: Status 404 returned error can't find the container with id 6f9ed28323d7241bc758509049f249f183f3bf9bbe20afd4a6fee18cba5fe304 Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.356662 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.510008 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"767fea6a-8b05-4ce5-a498-1b6fbc4aca86","Type":"ContainerDied","Data":"9ed01fb27f6c34db495a3858d9883d1ba48cc09501b3f6dc3c8a680f7433e466"} Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.510368 4911 scope.go:117] "RemoveContainer" containerID="411d0b79fc69a3e4de6134f8801ff98c6a123aa27dc73a8b79f433ab745b1705" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.510522 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.514066 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"16c16194-2433-4e69-b40e-4a1f3d43795e","Type":"ContainerStarted","Data":"6f9ed28323d7241bc758509049f249f183f3bf9bbe20afd4a6fee18cba5fe304"} Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.532785 4911 scope.go:117] "RemoveContainer" containerID="f2d7fda9784d762457a6116548a8bc38635efbc7544662e0202bf0bab5e6dadc" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.534288 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.545868 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.563359 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jun 06 09:33:58 crc kubenswrapper[4911]: E0606 09:33:58.563941 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerName="nova-api-api" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.563971 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerName="nova-api-api" Jun 06 09:33:58 crc kubenswrapper[4911]: E0606 09:33:58.564010 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerName="nova-api-log" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.564020 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerName="nova-api-log" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.564293 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerName="nova-api-log" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.564328 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" containerName="nova-api-api" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.566364 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.568580 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.591816 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.591905 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-logs\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.591981 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-config-data\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.592257 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9529h\" (UniqueName: \"kubernetes.io/projected/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-kube-api-access-9529h\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.594028 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.696453 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.696515 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-logs\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.696561 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-config-data\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.696665 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9529h\" (UniqueName: \"kubernetes.io/projected/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-kube-api-access-9529h\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.698505 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-logs\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " 
pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.709416 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-config-data\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.714882 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.724067 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9529h\" (UniqueName: \"kubernetes.io/projected/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-kube-api-access-9529h\") pod \"nova-api-0\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " pod="openstack/nova-api-0" Jun 06 09:33:58 crc kubenswrapper[4911]: I0606 09:33:58.886112 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:33:59 crc kubenswrapper[4911]: I0606 09:33:59.373449 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:33:59 crc kubenswrapper[4911]: W0606 09:33:59.384905 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ceca2bc_e3b2_4914_ae37_5ca1438412bb.slice/crio-3162f6754dc939f5cfec9482943e71311d6d211757c6d6f30970b1fd4c150fb1 WatchSource:0}: Error finding container 3162f6754dc939f5cfec9482943e71311d6d211757c6d6f30970b1fd4c150fb1: Status 404 returned error can't find the container with id 3162f6754dc939f5cfec9482943e71311d6d211757c6d6f30970b1fd4c150fb1 Jun 06 09:33:59 crc kubenswrapper[4911]: I0606 09:33:59.527215 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"16c16194-2433-4e69-b40e-4a1f3d43795e","Type":"ContainerStarted","Data":"28466fac393f2dc6acbc3dfdd4e82c243a01ded0343b6c0ea47bdd77a696599a"} Jun 06 09:33:59 crc kubenswrapper[4911]: I0606 09:33:59.528191 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9ceca2bc-e3b2-4914-ae37-5ca1438412bb","Type":"ContainerStarted","Data":"3162f6754dc939f5cfec9482943e71311d6d211757c6d6f30970b1fd4c150fb1"} Jun 06 09:33:59 crc kubenswrapper[4911]: I0606 09:33:59.547023 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.546993585 podStartE2EDuration="2.546993585s" podCreationTimestamp="2025-06-06 09:33:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:33:59.541203246 +0000 UTC m=+1250.816628799" watchObservedRunningTime="2025-06-06 09:33:59.546993585 +0000 UTC m=+1250.822419128" Jun 06 09:33:59 crc kubenswrapper[4911]: I0606 09:33:59.969835 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="767fea6a-8b05-4ce5-a498-1b6fbc4aca86" path="/var/lib/kubelet/pods/767fea6a-8b05-4ce5-a498-1b6fbc4aca86/volumes" Jun 06 09:34:00 crc kubenswrapper[4911]: I0606 09:34:00.539482 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"9ceca2bc-e3b2-4914-ae37-5ca1438412bb","Type":"ContainerStarted","Data":"6c72ba4a15ab646a72a7ab2118a821823cbe4f39eb676d898d728415ed35cfb2"} Jun 06 09:34:00 crc kubenswrapper[4911]: I0606 09:34:00.539568 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9ceca2bc-e3b2-4914-ae37-5ca1438412bb","Type":"ContainerStarted","Data":"a1d1c348b355fc0855220745c7b5109d87f876f5f33b715ab31a7204a5844756"} Jun 06 09:34:00 crc kubenswrapper[4911]: I0606 09:34:00.562959 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.5629407889999998 podStartE2EDuration="2.562940789s" podCreationTimestamp="2025-06-06 09:33:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:34:00.556308188 +0000 UTC m=+1251.831733741" watchObservedRunningTime="2025-06-06 09:34:00.562940789 +0000 UTC m=+1251.838366332" Jun 06 09:34:00 crc kubenswrapper[4911]: I0606 09:34:00.887581 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jun 06 09:34:00 crc kubenswrapper[4911]: I0606 09:34:00.887979 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jun 06 09:34:02 crc kubenswrapper[4911]: I0606 09:34:02.181739 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-cl8xn"] Jun 06 09:34:02 crc kubenswrapper[4911]: I0606 09:34:02.183015 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-cl8xn" Jun 06 09:34:02 crc kubenswrapper[4911]: I0606 09:34:02.263389 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4234050e-8684-4fc1-b511-cc82e005d13e-host\") pod \"crc-debug-cl8xn\" (UID: \"4234050e-8684-4fc1-b511-cc82e005d13e\") " pod="openstack/crc-debug-cl8xn" Jun 06 09:34:02 crc kubenswrapper[4911]: I0606 09:34:02.263802 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qf74\" (UniqueName: \"kubernetes.io/projected/4234050e-8684-4fc1-b511-cc82e005d13e-kube-api-access-9qf74\") pod \"crc-debug-cl8xn\" (UID: \"4234050e-8684-4fc1-b511-cc82e005d13e\") " pod="openstack/crc-debug-cl8xn" Jun 06 09:34:02 crc kubenswrapper[4911]: I0606 09:34:02.366350 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4234050e-8684-4fc1-b511-cc82e005d13e-host\") pod \"crc-debug-cl8xn\" (UID: \"4234050e-8684-4fc1-b511-cc82e005d13e\") " pod="openstack/crc-debug-cl8xn" Jun 06 09:34:02 crc kubenswrapper[4911]: I0606 09:34:02.366663 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qf74\" (UniqueName: \"kubernetes.io/projected/4234050e-8684-4fc1-b511-cc82e005d13e-kube-api-access-9qf74\") pod \"crc-debug-cl8xn\" (UID: \"4234050e-8684-4fc1-b511-cc82e005d13e\") " pod="openstack/crc-debug-cl8xn" Jun 06 09:34:02 crc kubenswrapper[4911]: I0606 09:34:02.366742 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4234050e-8684-4fc1-b511-cc82e005d13e-host\") pod \"crc-debug-cl8xn\" (UID: \"4234050e-8684-4fc1-b511-cc82e005d13e\") " pod="openstack/crc-debug-cl8xn" Jun 06 09:34:02 crc kubenswrapper[4911]: I0606 09:34:02.390016 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qf74\" (UniqueName: \"kubernetes.io/projected/4234050e-8684-4fc1-b511-cc82e005d13e-kube-api-access-9qf74\") pod \"crc-debug-cl8xn\" (UID: \"4234050e-8684-4fc1-b511-cc82e005d13e\") " pod="openstack/crc-debug-cl8xn" Jun 06 09:34:02 crc kubenswrapper[4911]: I0606 09:34:02.504862 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-cl8xn" Jun 06 09:34:02 crc kubenswrapper[4911]: I0606 09:34:02.556297 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-cl8xn" event={"ID":"4234050e-8684-4fc1-b511-cc82e005d13e","Type":"ContainerStarted","Data":"b6e6ff5ce2bb907592510a36a08d696c2078a40acfc50cc7ac4cc3a884c195c7"} Jun 06 09:34:02 crc kubenswrapper[4911]: I0606 09:34:02.902323 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jun 06 09:34:03 crc kubenswrapper[4911]: I0606 09:34:03.567650 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-cl8xn" event={"ID":"4234050e-8684-4fc1-b511-cc82e005d13e","Type":"ContainerStarted","Data":"79da7d22748fb85fb0a8107ff0751138961f51805da737687436276ac0b96a16"} Jun 06 09:34:03 crc kubenswrapper[4911]: I0606 09:34:03.585289 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-cl8xn" podStartSLOduration=1.5852654130000001 podStartE2EDuration="1.585265413s" podCreationTimestamp="2025-06-06 09:34:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:34:03.580368596 +0000 UTC m=+1254.855794139" watchObservedRunningTime="2025-06-06 09:34:03.585265413 +0000 UTC m=+1254.860690976" Jun 06 09:34:03 crc kubenswrapper[4911]: I0606 09:34:03.844058 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Jun 06 09:34:04 crc kubenswrapper[4911]: I0606 09:34:04.860153 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-cl8xn"] Jun 06 09:34:04 crc kubenswrapper[4911]: I0606 09:34:04.872591 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-cl8xn"] Jun 06 09:34:05 crc kubenswrapper[4911]: I0606 09:34:05.587250 4911 generic.go:334] "Generic (PLEG): container finished" podID="4234050e-8684-4fc1-b511-cc82e005d13e" containerID="79da7d22748fb85fb0a8107ff0751138961f51805da737687436276ac0b96a16" exitCode=0 Jun 06 09:34:05 crc kubenswrapper[4911]: I0606 09:34:05.695486 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-cl8xn" Jun 06 09:34:05 crc kubenswrapper[4911]: I0606 09:34:05.828150 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qf74\" (UniqueName: \"kubernetes.io/projected/4234050e-8684-4fc1-b511-cc82e005d13e-kube-api-access-9qf74\") pod \"4234050e-8684-4fc1-b511-cc82e005d13e\" (UID: \"4234050e-8684-4fc1-b511-cc82e005d13e\") " Jun 06 09:34:05 crc kubenswrapper[4911]: I0606 09:34:05.828476 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4234050e-8684-4fc1-b511-cc82e005d13e-host\") pod \"4234050e-8684-4fc1-b511-cc82e005d13e\" (UID: \"4234050e-8684-4fc1-b511-cc82e005d13e\") " Jun 06 09:34:05 crc kubenswrapper[4911]: I0606 09:34:05.828553 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4234050e-8684-4fc1-b511-cc82e005d13e-host" (OuterVolumeSpecName: "host") pod "4234050e-8684-4fc1-b511-cc82e005d13e" (UID: "4234050e-8684-4fc1-b511-cc82e005d13e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:34:05 crc kubenswrapper[4911]: I0606 09:34:05.828975 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4234050e-8684-4fc1-b511-cc82e005d13e-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:05 crc kubenswrapper[4911]: I0606 09:34:05.834554 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4234050e-8684-4fc1-b511-cc82e005d13e-kube-api-access-9qf74" (OuterVolumeSpecName: "kube-api-access-9qf74") pod "4234050e-8684-4fc1-b511-cc82e005d13e" (UID: "4234050e-8684-4fc1-b511-cc82e005d13e"). InnerVolumeSpecName "kube-api-access-9qf74". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:34:05 crc kubenswrapper[4911]: I0606 09:34:05.887437 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jun 06 09:34:05 crc kubenswrapper[4911]: I0606 09:34:05.887521 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jun 06 09:34:05 crc kubenswrapper[4911]: I0606 09:34:05.932167 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qf74\" (UniqueName: \"kubernetes.io/projected/4234050e-8684-4fc1-b511-cc82e005d13e-kube-api-access-9qf74\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:05 crc kubenswrapper[4911]: I0606 09:34:05.959177 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4234050e-8684-4fc1-b511-cc82e005d13e" path="/var/lib/kubelet/pods/4234050e-8684-4fc1-b511-cc82e005d13e/volumes" Jun 06 09:34:06 crc kubenswrapper[4911]: I0606 09:34:06.598713 4911 scope.go:117] "RemoveContainer" containerID="79da7d22748fb85fb0a8107ff0751138961f51805da737687436276ac0b96a16" Jun 06 09:34:06 crc kubenswrapper[4911]: I0606 09:34:06.599183 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-cl8xn" Jun 06 09:34:06 crc kubenswrapper[4911]: I0606 09:34:06.897338 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jun 06 09:34:06 crc kubenswrapper[4911]: I0606 09:34:06.897896 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jun 06 09:34:07 crc kubenswrapper[4911]: I0606 09:34:07.903077 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jun 06 09:34:07 crc kubenswrapper[4911]: I0606 09:34:07.932596 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jun 06 09:34:08 crc kubenswrapper[4911]: I0606 09:34:08.639193 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jun 06 09:34:08 crc kubenswrapper[4911]: I0606 09:34:08.887022 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jun 06 09:34:08 crc kubenswrapper[4911]: I0606 09:34:08.887352 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jun 06 09:34:09 crc kubenswrapper[4911]: I0606 09:34:09.970377 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.203:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jun 06 09:34:09 crc kubenswrapper[4911]: I0606 09:34:09.970418 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.203:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jun 06 09:34:15 crc kubenswrapper[4911]: I0606 09:34:15.892517 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jun 06 09:34:15 crc kubenswrapper[4911]: I0606 09:34:15.894120 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jun 06 09:34:15 crc kubenswrapper[4911]: I0606 09:34:15.898543 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jun 06 09:34:16 crc kubenswrapper[4911]: I0606 09:34:16.703394 4911 generic.go:334] "Generic (PLEG): container finished" podID="994636bb-4927-4649-9032-3b3c28eb6289" containerID="ba30b079d6e62e5132e1054023728456e28350b9f907f726afbf7cc907007e0d" exitCode=137 Jun 06 09:34:16 crc kubenswrapper[4911]: I0606 09:34:16.703467 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"994636bb-4927-4649-9032-3b3c28eb6289","Type":"ContainerDied","Data":"ba30b079d6e62e5132e1054023728456e28350b9f907f726afbf7cc907007e0d"} Jun 06 09:34:16 crc kubenswrapper[4911]: I0606 09:34:16.708900 4911 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.207019 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.279477 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-combined-ca-bundle\") pod \"994636bb-4927-4649-9032-3b3c28eb6289\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.279565 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-config-data\") pod \"994636bb-4927-4649-9032-3b3c28eb6289\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.279632 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9t7r\" (UniqueName: \"kubernetes.io/projected/994636bb-4927-4649-9032-3b3c28eb6289-kube-api-access-m9t7r\") pod \"994636bb-4927-4649-9032-3b3c28eb6289\" (UID: \"994636bb-4927-4649-9032-3b3c28eb6289\") " Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.287377 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/994636bb-4927-4649-9032-3b3c28eb6289-kube-api-access-m9t7r" (OuterVolumeSpecName: "kube-api-access-m9t7r") pod "994636bb-4927-4649-9032-3b3c28eb6289" (UID: "994636bb-4927-4649-9032-3b3c28eb6289"). InnerVolumeSpecName "kube-api-access-m9t7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.309736 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-config-data" (OuterVolumeSpecName: "config-data") pod "994636bb-4927-4649-9032-3b3c28eb6289" (UID: "994636bb-4927-4649-9032-3b3c28eb6289"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.310371 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "994636bb-4927-4649-9032-3b3c28eb6289" (UID: "994636bb-4927-4649-9032-3b3c28eb6289"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.382514 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.382556 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994636bb-4927-4649-9032-3b3c28eb6289-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.382566 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9t7r\" (UniqueName: \"kubernetes.io/projected/994636bb-4927-4649-9032-3b3c28eb6289-kube-api-access-m9t7r\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.713871 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"994636bb-4927-4649-9032-3b3c28eb6289","Type":"ContainerDied","Data":"6e2863968986bd4ab1affe4e3fb4bfe214b25ec743e473f5da28056aba624ae6"} Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.713914 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.713959 4911 scope.go:117] "RemoveContainer" containerID="ba30b079d6e62e5132e1054023728456e28350b9f907f726afbf7cc907007e0d" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.747176 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.757321 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.773932 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jun 06 09:34:17 crc kubenswrapper[4911]: E0606 09:34:17.774467 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="994636bb-4927-4649-9032-3b3c28eb6289" containerName="nova-cell1-novncproxy-novncproxy" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.774487 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="994636bb-4927-4649-9032-3b3c28eb6289" containerName="nova-cell1-novncproxy-novncproxy" Jun 06 09:34:17 crc kubenswrapper[4911]: E0606 09:34:17.774521 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4234050e-8684-4fc1-b511-cc82e005d13e" containerName="container-00" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.774530 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4234050e-8684-4fc1-b511-cc82e005d13e" containerName="container-00" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.774719 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4234050e-8684-4fc1-b511-cc82e005d13e" containerName="container-00" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.774744 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="994636bb-4927-4649-9032-3b3c28eb6289" containerName="nova-cell1-novncproxy-novncproxy" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.775558 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.779867 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.779961 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.780574 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.787270 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.892768 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wfzx\" (UniqueName: \"kubernetes.io/projected/adbe049a-9472-4e11-a99d-32969a196e78-kube-api-access-6wfzx\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.892905 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.892955 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.893161 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.893478 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.960562 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="994636bb-4927-4649-9032-3b3c28eb6289" path="/var/lib/kubelet/pods/994636bb-4927-4649-9032-3b3c28eb6289/volumes" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.994912 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.995039 4911 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.995177 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wfzx\" (UniqueName: \"kubernetes.io/projected/adbe049a-9472-4e11-a99d-32969a196e78-kube-api-access-6wfzx\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.995213 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.995229 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:17 crc kubenswrapper[4911]: I0606 09:34:17.999435 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.000138 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.000474 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.000482 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/adbe049a-9472-4e11-a99d-32969a196e78-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.014469 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wfzx\" (UniqueName: \"kubernetes.io/projected/adbe049a-9472-4e11-a99d-32969a196e78-kube-api-access-6wfzx\") pod \"nova-cell1-novncproxy-0\" (UID: \"adbe049a-9472-4e11-a99d-32969a196e78\") " pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.097947 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.536460 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jun 06 09:34:18 crc kubenswrapper[4911]: W0606 09:34:18.539042 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podadbe049a_9472_4e11_a99d_32969a196e78.slice/crio-34155574da159114b224268c67145e3ab1b46224255d4586bc8bdbdf4af5271b WatchSource:0}: Error finding container 34155574da159114b224268c67145e3ab1b46224255d4586bc8bdbdf4af5271b: Status 404 returned error can't find the container with id 34155574da159114b224268c67145e3ab1b46224255d4586bc8bdbdf4af5271b Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.726439 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"adbe049a-9472-4e11-a99d-32969a196e78","Type":"ContainerStarted","Data":"34155574da159114b224268c67145e3ab1b46224255d4586bc8bdbdf4af5271b"} Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.890556 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.890622 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.891214 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.891268 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.895231 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jun 06 09:34:18 crc kubenswrapper[4911]: I0606 09:34:18.897051 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.083350 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-748b4c9f7f-q2697"] Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.089703 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.101519 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-748b4c9f7f-q2697"] Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.226618 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndh7l\" (UniqueName: \"kubernetes.io/projected/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-kube-api-access-ndh7l\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.226696 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-swift-storage-0\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.226940 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-svc\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.227017 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-config\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.227144 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-nb\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.227196 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-sb\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.329164 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-nb\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.329220 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-sb\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.329325 4911 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ndh7l\" (UniqueName: \"kubernetes.io/projected/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-kube-api-access-ndh7l\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.329352 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-swift-storage-0\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.329423 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-svc\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.329444 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-config\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.330160 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-sb\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.330160 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-nb\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.330686 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-config\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.330711 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-svc\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.330968 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-swift-storage-0\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.356559 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndh7l\" (UniqueName: 
\"kubernetes.io/projected/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-kube-api-access-ndh7l\") pod \"dnsmasq-dns-748b4c9f7f-q2697\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.424782 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.753140 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"adbe049a-9472-4e11-a99d-32969a196e78","Type":"ContainerStarted","Data":"55de9585655a10c5d0086ae4342df3eb790eb9ad64df47488322f5a0acbaf76f"} Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.810073 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.8100471540000003 podStartE2EDuration="2.810047154s" podCreationTimestamp="2025-06-06 09:34:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:34:19.768868422 +0000 UTC m=+1271.044293955" watchObservedRunningTime="2025-06-06 09:34:19.810047154 +0000 UTC m=+1271.085472697" Jun 06 09:34:19 crc kubenswrapper[4911]: I0606 09:34:19.904160 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-748b4c9f7f-q2697"] Jun 06 09:34:19 crc kubenswrapper[4911]: W0606 09:34:19.906110 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93fa7ffc_cc42_4e89_8f38_a74170fcbcba.slice/crio-f6d47e4ffadbe3e4c515cae6e43970dba0802a8078738b95a1f911ebe4956f46 WatchSource:0}: Error finding container f6d47e4ffadbe3e4c515cae6e43970dba0802a8078738b95a1f911ebe4956f46: Status 404 returned error can't find the container with id f6d47e4ffadbe3e4c515cae6e43970dba0802a8078738b95a1f911ebe4956f46 Jun 06 09:34:20 crc kubenswrapper[4911]: I0606 09:34:20.764599 4911 generic.go:334] "Generic (PLEG): container finished" podID="93fa7ffc-cc42-4e89-8f38-a74170fcbcba" containerID="ab7be46a1af4dee0c92f0b635efd09e120d80e99d797de757f1391ab31c3dab2" exitCode=0 Jun 06 09:34:20 crc kubenswrapper[4911]: I0606 09:34:20.764698 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" event={"ID":"93fa7ffc-cc42-4e89-8f38-a74170fcbcba","Type":"ContainerDied","Data":"ab7be46a1af4dee0c92f0b635efd09e120d80e99d797de757f1391ab31c3dab2"} Jun 06 09:34:20 crc kubenswrapper[4911]: I0606 09:34:20.765077 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" event={"ID":"93fa7ffc-cc42-4e89-8f38-a74170fcbcba","Type":"ContainerStarted","Data":"f6d47e4ffadbe3e4c515cae6e43970dba0802a8078738b95a1f911ebe4956f46"} Jun 06 09:34:21 crc kubenswrapper[4911]: I0606 09:34:21.508978 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:21 crc kubenswrapper[4911]: I0606 09:34:21.776695 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerName="nova-api-log" containerID="cri-o://a1d1c348b355fc0855220745c7b5109d87f876f5f33b715ab31a7204a5844756" gracePeriod=30 Jun 06 09:34:21 crc kubenswrapper[4911]: I0606 09:34:21.777775 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" 
event={"ID":"93fa7ffc-cc42-4e89-8f38-a74170fcbcba","Type":"ContainerStarted","Data":"5c6d5d3181ca899b730c2c2c04e206c965e5875b93e3477e3a4904a0d160cc83"} Jun 06 09:34:21 crc kubenswrapper[4911]: I0606 09:34:21.777815 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:21 crc kubenswrapper[4911]: I0606 09:34:21.778259 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerName="nova-api-api" containerID="cri-o://6c72ba4a15ab646a72a7ab2118a821823cbe4f39eb676d898d728415ed35cfb2" gracePeriod=30 Jun 06 09:34:21 crc kubenswrapper[4911]: I0606 09:34:21.811646 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" podStartSLOduration=2.811628741 podStartE2EDuration="2.811628741s" podCreationTimestamp="2025-06-06 09:34:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:34:21.797043785 +0000 UTC m=+1273.072469328" watchObservedRunningTime="2025-06-06 09:34:21.811628741 +0000 UTC m=+1273.087054284" Jun 06 09:34:22 crc kubenswrapper[4911]: I0606 09:34:22.362108 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:34:22 crc kubenswrapper[4911]: I0606 09:34:22.362762 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="ceilometer-central-agent" containerID="cri-o://193854695dad61cd28031ca959496fb35970e298df6de7a80b9c033e618a029d" gracePeriod=30 Jun 06 09:34:22 crc kubenswrapper[4911]: I0606 09:34:22.362823 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="proxy-httpd" containerID="cri-o://86a51c83ca2759180c63f61492dfa41a744a52fd5cdd3f08cd78d5798acf2ca8" gracePeriod=30 Jun 06 09:34:22 crc kubenswrapper[4911]: I0606 09:34:22.362857 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="ceilometer-notification-agent" containerID="cri-o://31de43dbc2667d1d8e10cd2f5a1e4115329d50ae59209787fd17c0e6c4cfd900" gracePeriod=30 Jun 06 09:34:22 crc kubenswrapper[4911]: I0606 09:34:22.362870 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="sg-core" containerID="cri-o://e3d551ccd655c88958ed221e4f631112bc911aefaa6fdf08a7d6d790b822eacc" gracePeriod=30 Jun 06 09:34:22 crc kubenswrapper[4911]: I0606 09:34:22.787814 4911 generic.go:334] "Generic (PLEG): container finished" podID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerID="a1d1c348b355fc0855220745c7b5109d87f876f5f33b715ab31a7204a5844756" exitCode=143 Jun 06 09:34:22 crc kubenswrapper[4911]: I0606 09:34:22.787872 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9ceca2bc-e3b2-4914-ae37-5ca1438412bb","Type":"ContainerDied","Data":"a1d1c348b355fc0855220745c7b5109d87f876f5f33b715ab31a7204a5844756"} Jun 06 09:34:22 crc kubenswrapper[4911]: I0606 09:34:22.790603 4911 generic.go:334] "Generic (PLEG): container finished" podID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" 
containerID="86a51c83ca2759180c63f61492dfa41a744a52fd5cdd3f08cd78d5798acf2ca8" exitCode=0 Jun 06 09:34:22 crc kubenswrapper[4911]: I0606 09:34:22.790631 4911 generic.go:334] "Generic (PLEG): container finished" podID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerID="e3d551ccd655c88958ed221e4f631112bc911aefaa6fdf08a7d6d790b822eacc" exitCode=2 Jun 06 09:34:22 crc kubenswrapper[4911]: I0606 09:34:22.791492 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37","Type":"ContainerDied","Data":"86a51c83ca2759180c63f61492dfa41a744a52fd5cdd3f08cd78d5798acf2ca8"} Jun 06 09:34:22 crc kubenswrapper[4911]: I0606 09:34:22.791523 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37","Type":"ContainerDied","Data":"e3d551ccd655c88958ed221e4f631112bc911aefaa6fdf08a7d6d790b822eacc"} Jun 06 09:34:23 crc kubenswrapper[4911]: I0606 09:34:23.099285 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:23 crc kubenswrapper[4911]: I0606 09:34:23.801572 4911 generic.go:334] "Generic (PLEG): container finished" podID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerID="193854695dad61cd28031ca959496fb35970e298df6de7a80b9c033e618a029d" exitCode=0 Jun 06 09:34:23 crc kubenswrapper[4911]: I0606 09:34:23.801658 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37","Type":"ContainerDied","Data":"193854695dad61cd28031ca959496fb35970e298df6de7a80b9c033e618a029d"} Jun 06 09:34:24 crc kubenswrapper[4911]: I0606 09:34:24.300986 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:34:24 crc kubenswrapper[4911]: I0606 09:34:24.301164 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:34:25 crc kubenswrapper[4911]: I0606 09:34:25.824827 4911 generic.go:334] "Generic (PLEG): container finished" podID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerID="6c72ba4a15ab646a72a7ab2118a821823cbe4f39eb676d898d728415ed35cfb2" exitCode=0 Jun 06 09:34:25 crc kubenswrapper[4911]: I0606 09:34:25.824982 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9ceca2bc-e3b2-4914-ae37-5ca1438412bb","Type":"ContainerDied","Data":"6c72ba4a15ab646a72a7ab2118a821823cbe4f39eb676d898d728415ed35cfb2"} Jun 06 09:34:25 crc kubenswrapper[4911]: I0606 09:34:25.829606 4911 generic.go:334] "Generic (PLEG): container finished" podID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerID="31de43dbc2667d1d8e10cd2f5a1e4115329d50ae59209787fd17c0e6c4cfd900" exitCode=0 Jun 06 09:34:25 crc kubenswrapper[4911]: I0606 09:34:25.829656 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37","Type":"ContainerDied","Data":"31de43dbc2667d1d8e10cd2f5a1e4115329d50ae59209787fd17c0e6c4cfd900"} Jun 06 09:34:26 crc kubenswrapper[4911]: 
I0606 09:34:26.133715 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.141439 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.311861 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-config-data\") pod \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.311992 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-log-httpd\") pod \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.312015 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrndv\" (UniqueName: \"kubernetes.io/projected/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-kube-api-access-qrndv\") pod \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.312083 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-ceilometer-tls-certs\") pod \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.312141 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-scripts\") pod \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.312181 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-config-data\") pod \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.312233 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9529h\" (UniqueName: \"kubernetes.io/projected/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-kube-api-access-9529h\") pod \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.312268 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-sg-core-conf-yaml\") pod \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.312355 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-run-httpd\") pod \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.312405 4911 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-combined-ca-bundle\") pod \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\" (UID: \"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.312429 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-combined-ca-bundle\") pod \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.312505 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-logs\") pod \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\" (UID: \"9ceca2bc-e3b2-4914-ae37-5ca1438412bb\") " Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.313116 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" (UID: "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.313176 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" (UID: "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.313587 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-logs" (OuterVolumeSpecName: "logs") pod "9ceca2bc-e3b2-4914-ae37-5ca1438412bb" (UID: "9ceca2bc-e3b2-4914-ae37-5ca1438412bb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.320531 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-kube-api-access-qrndv" (OuterVolumeSpecName: "kube-api-access-qrndv") pod "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" (UID: "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37"). InnerVolumeSpecName "kube-api-access-qrndv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.340691 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-scripts" (OuterVolumeSpecName: "scripts") pod "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" (UID: "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.341241 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-kube-api-access-9529h" (OuterVolumeSpecName: "kube-api-access-9529h") pod "9ceca2bc-e3b2-4914-ae37-5ca1438412bb" (UID: "9ceca2bc-e3b2-4914-ae37-5ca1438412bb"). InnerVolumeSpecName "kube-api-access-9529h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.355817 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" (UID: "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.374187 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9ceca2bc-e3b2-4914-ae37-5ca1438412bb" (UID: "9ceca2bc-e3b2-4914-ae37-5ca1438412bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.381137 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-config-data" (OuterVolumeSpecName: "config-data") pod "9ceca2bc-e3b2-4914-ae37-5ca1438412bb" (UID: "9ceca2bc-e3b2-4914-ae37-5ca1438412bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.389342 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" (UID: "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.415706 4911 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-run-httpd\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.415760 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.415774 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.415784 4911 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-log-httpd\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.415796 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrndv\" (UniqueName: \"kubernetes.io/projected/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-kube-api-access-qrndv\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.415810 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.415823 4911 reconciler_common.go:293] "Volume detached for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.415834 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.415846 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9529h\" (UniqueName: \"kubernetes.io/projected/9ceca2bc-e3b2-4914-ae37-5ca1438412bb-kube-api-access-9529h\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.415857 4911 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.444136 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" (UID: "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.448384 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-config-data" (OuterVolumeSpecName: "config-data") pod "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" (UID: "d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.517992 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.518045 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.841533 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37","Type":"ContainerDied","Data":"2b2e65fde91f84e5e5537d1d4d28880ebf2df80e9ed7d39ae2b514e8cc958c59"} Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.841603 4911 scope.go:117] "RemoveContainer" containerID="86a51c83ca2759180c63f61492dfa41a744a52fd5cdd3f08cd78d5798acf2ca8" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.841605 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.844068 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9ceca2bc-e3b2-4914-ae37-5ca1438412bb","Type":"ContainerDied","Data":"3162f6754dc939f5cfec9482943e71311d6d211757c6d6f30970b1fd4c150fb1"} Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.844157 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.877357 4911 scope.go:117] "RemoveContainer" containerID="e3d551ccd655c88958ed221e4f631112bc911aefaa6fdf08a7d6d790b822eacc" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.909356 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.920526 4911 scope.go:117] "RemoveContainer" containerID="31de43dbc2667d1d8e10cd2f5a1e4115329d50ae59209787fd17c0e6c4cfd900" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.958458 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.970130 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:26 crc kubenswrapper[4911]: E0606 09:34:26.970610 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="sg-core" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.970630 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="sg-core" Jun 06 09:34:26 crc kubenswrapper[4911]: E0606 09:34:26.970645 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="ceilometer-central-agent" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.970651 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="ceilometer-central-agent" Jun 06 09:34:26 crc kubenswrapper[4911]: E0606 09:34:26.970800 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="ceilometer-notification-agent" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.970846 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="ceilometer-notification-agent" Jun 06 09:34:26 crc kubenswrapper[4911]: E0606 09:34:26.970853 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="proxy-httpd" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.970858 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="proxy-httpd" Jun 06 09:34:26 crc kubenswrapper[4911]: E0606 09:34:26.970898 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerName="nova-api-log" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.970904 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerName="nova-api-log" Jun 06 09:34:26 crc kubenswrapper[4911]: E0606 09:34:26.971118 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerName="nova-api-api" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.971131 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerName="nova-api-api" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.971330 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="ceilometer-central-agent" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.971348 4911 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="proxy-httpd" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.971361 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerName="nova-api-log" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.971371 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="ceilometer-notification-agent" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.971379 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" containerName="nova-api-api" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.971391 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" containerName="sg-core" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.972568 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.978661 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.978698 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.978883 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jun 06 09:34:26 crc kubenswrapper[4911]: I0606 09:34:26.997786 4911 scope.go:117] "RemoveContainer" containerID="193854695dad61cd28031ca959496fb35970e298df6de7a80b9c033e618a029d" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.005583 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.016906 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.025801 4911 scope.go:117] "RemoveContainer" containerID="6c72ba4a15ab646a72a7ab2118a821823cbe4f39eb676d898d728415ed35cfb2" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.027082 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.037580 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.040510 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.043624 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.043815 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.043975 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.048392 4911 scope.go:117] "RemoveContainer" containerID="a1d1c348b355fc0855220745c7b5109d87f876f5f33b715ab31a7204a5844756" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.048819 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.136503 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-scripts\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.136562 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-run-httpd\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.136594 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-log-httpd\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.136620 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.136721 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfrmm\" (UniqueName: \"kubernetes.io/projected/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-kube-api-access-dfrmm\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.136743 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.136776 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 
09:34:27.136857 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-config-data\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.136898 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.136940 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-config-data\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.136973 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-logs\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.137037 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cphg\" (UniqueName: \"kubernetes.io/projected/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-kube-api-access-6cphg\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.137191 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-public-tls-certs\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.137584 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.239691 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.239812 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-scripts\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.239865 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-run-httpd\") pod \"ceilometer-0\" (UID: 
\"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.239896 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-log-httpd\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.239929 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.239970 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfrmm\" (UniqueName: \"kubernetes.io/projected/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-kube-api-access-dfrmm\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.240008 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.240057 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.240118 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-config-data\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.240140 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.240184 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-config-data\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.240214 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-logs\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.240262 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cphg\" (UniqueName: \"kubernetes.io/projected/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-kube-api-access-6cphg\") pod \"ceilometer-0\" 
(UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.240310 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-public-tls-certs\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.240431 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-run-httpd\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.240510 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-log-httpd\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.241013 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-logs\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.244743 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.245330 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.245468 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-public-tls-certs\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.245627 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.245972 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.246628 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-config-data\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc 
kubenswrapper[4911]: I0606 09:34:27.246637 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.251801 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-scripts\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.253210 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-config-data\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.258934 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfrmm\" (UniqueName: \"kubernetes.io/projected/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-kube-api-access-dfrmm\") pod \"nova-api-0\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.265816 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cphg\" (UniqueName: \"kubernetes.io/projected/27c4a155-3275-4a3e-9d1b-18cfb92f7d99-kube-api-access-6cphg\") pod \"ceilometer-0\" (UID: \"27c4a155-3275-4a3e-9d1b-18cfb92f7d99\") " pod="openstack/ceilometer-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.300428 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:34:27 crc kubenswrapper[4911]: I0606 09:34:27.370176 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jun 06 09:34:28 crc kubenswrapper[4911]: I0606 09:34:27.962447 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ceca2bc-e3b2-4914-ae37-5ca1438412bb" path="/var/lib/kubelet/pods/9ceca2bc-e3b2-4914-ae37-5ca1438412bb/volumes" Jun 06 09:34:28 crc kubenswrapper[4911]: I0606 09:34:27.963508 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37" path="/var/lib/kubelet/pods/d7dc345f-6b7c-4f1b-8bbd-dfdec23e0f37/volumes" Jun 06 09:34:28 crc kubenswrapper[4911]: I0606 09:34:28.098852 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:28 crc kubenswrapper[4911]: I0606 09:34:28.115867 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:28 crc kubenswrapper[4911]: I0606 09:34:28.357130 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:28 crc kubenswrapper[4911]: I0606 09:34:28.579350 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jun 06 09:34:28 crc kubenswrapper[4911]: W0606 09:34:28.581053 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27c4a155_3275_4a3e_9d1b_18cfb92f7d99.slice/crio-d2ee19d17b89abe94273eb94d924c36aa323363bd92e2a8c7b66c03c30cda34b WatchSource:0}: Error finding container d2ee19d17b89abe94273eb94d924c36aa323363bd92e2a8c7b66c03c30cda34b: Status 404 returned error can't find the container with id d2ee19d17b89abe94273eb94d924c36aa323363bd92e2a8c7b66c03c30cda34b Jun 06 09:34:28 crc kubenswrapper[4911]: I0606 09:34:28.879299 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1","Type":"ContainerStarted","Data":"0c011a88ad0fe1a65100fc867f074267767fcf2587605063f55a22596b5e9e77"} Jun 06 09:34:28 crc kubenswrapper[4911]: I0606 09:34:28.879645 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1","Type":"ContainerStarted","Data":"d35125f59375196143622813a71e6d6707864812d0a6443211b2b35f9270c445"} Jun 06 09:34:28 crc kubenswrapper[4911]: I0606 09:34:28.881536 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"27c4a155-3275-4a3e-9d1b-18cfb92f7d99","Type":"ContainerStarted","Data":"d2ee19d17b89abe94273eb94d924c36aa323363bd92e2a8c7b66c03c30cda34b"} Jun 06 09:34:28 crc kubenswrapper[4911]: I0606 09:34:28.900045 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jun 06 09:34:28 crc kubenswrapper[4911]: I0606 09:34:28.915603 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.915583572 podStartE2EDuration="2.915583572s" podCreationTimestamp="2025-06-06 09:34:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:34:28.913165859 +0000 UTC m=+1280.188591412" watchObservedRunningTime="2025-06-06 09:34:28.915583572 +0000 UTC m=+1280.191009115" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.184621 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-jwx66"] Jun 06 09:34:29 crc 
kubenswrapper[4911]: I0606 09:34:29.186634 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.189012 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.189259 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.196479 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-jwx66"] Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.285263 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.285353 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt97d\" (UniqueName: \"kubernetes.io/projected/27435fe9-7ac4-4fbb-9137-dabee568caf7-kube-api-access-bt97d\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.285393 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-scripts\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.285441 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-config-data\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.387476 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-config-data\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.387619 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.387685 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt97d\" (UniqueName: \"kubernetes.io/projected/27435fe9-7ac4-4fbb-9137-dabee568caf7-kube-api-access-bt97d\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.387723 
4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-scripts\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.392583 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-config-data\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.392597 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.393138 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-scripts\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.405002 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt97d\" (UniqueName: \"kubernetes.io/projected/27435fe9-7ac4-4fbb-9137-dabee568caf7-kube-api-access-bt97d\") pod \"nova-cell1-cell-mapping-jwx66\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.425827 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.517956 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.522629 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-686c8bcc79-fgqg8"] Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.523193 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" podUID="0a07157b-eade-4d9d-8d5d-277ed8ff407e" containerName="dnsmasq-dns" containerID="cri-o://0da0ec17c4e5f7f9be78247f771ab685f6779d57962f755fb2c6e84a2f6c657d" gracePeriod=10 Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.890735 4911 generic.go:334] "Generic (PLEG): container finished" podID="0a07157b-eade-4d9d-8d5d-277ed8ff407e" containerID="0da0ec17c4e5f7f9be78247f771ab685f6779d57962f755fb2c6e84a2f6c657d" exitCode=0 Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.891413 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" event={"ID":"0a07157b-eade-4d9d-8d5d-277ed8ff407e","Type":"ContainerDied","Data":"0da0ec17c4e5f7f9be78247f771ab685f6779d57962f755fb2c6e84a2f6c657d"} Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.893041 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1","Type":"ContainerStarted","Data":"137f7ca54f8970e0ba04f48a648d8f31d67652ce084d91c45406545fd199e0aa"} Jun 06 09:34:29 crc kubenswrapper[4911]: I0606 09:34:29.896724 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"27c4a155-3275-4a3e-9d1b-18cfb92f7d99","Type":"ContainerStarted","Data":"5a757ccdd909b1595429c6b71235ba01c2c464e453a71f2b199d2a2f2361faea"} Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.233998 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-jwx66"] Jun 06 09:34:30 crc kubenswrapper[4911]: W0606 09:34:30.397630 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27435fe9_7ac4_4fbb_9137_dabee568caf7.slice/crio-7e05099dd871e24ba144d3fc41d2d823e8d2b54231cdca988b60834476e3214c WatchSource:0}: Error finding container 7e05099dd871e24ba144d3fc41d2d823e8d2b54231cdca988b60834476e3214c: Status 404 returned error can't find the container with id 7e05099dd871e24ba144d3fc41d2d823e8d2b54231cdca988b60834476e3214c Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.839687 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.910882 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"27c4a155-3275-4a3e-9d1b-18cfb92f7d99","Type":"ContainerStarted","Data":"5d9fbf64afa0072edcede314883fd2ef9c16ed2b35ffed4dbc1b30005ce8bfb7"} Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.910933 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"27c4a155-3275-4a3e-9d1b-18cfb92f7d99","Type":"ContainerStarted","Data":"2081ce222d9355dae7b6529d9eaeedf0364fded3debea41a6177ec92b4a7929d"} Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.912626 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jwx66" event={"ID":"27435fe9-7ac4-4fbb-9137-dabee568caf7","Type":"ContainerStarted","Data":"0f31b44c501e518dd703c0f9b16c9a1528de5ab578f7213db722bf3043893154"} Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.912710 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jwx66" event={"ID":"27435fe9-7ac4-4fbb-9137-dabee568caf7","Type":"ContainerStarted","Data":"7e05099dd871e24ba144d3fc41d2d823e8d2b54231cdca988b60834476e3214c"} Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.916900 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.917157 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" event={"ID":"0a07157b-eade-4d9d-8d5d-277ed8ff407e","Type":"ContainerDied","Data":"d870554e6074fd449e70c75e9f71083f0aaf215e9fdbc2e2349e08a1fce6864c"} Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.917245 4911 scope.go:117] "RemoveContainer" containerID="0da0ec17c4e5f7f9be78247f771ab685f6779d57962f755fb2c6e84a2f6c657d" Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.930756 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-jwx66" podStartSLOduration=1.9307327970000001 podStartE2EDuration="1.930732797s" podCreationTimestamp="2025-06-06 09:34:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:34:30.930623584 +0000 UTC m=+1282.206049127" watchObservedRunningTime="2025-06-06 09:34:30.930732797 +0000 UTC m=+1282.206158340" Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.942364 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-nb\") pod \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.942409 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-svc\") pod \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.942565 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-swift-storage-0\") pod 
\"0a07157b-eade-4d9d-8d5d-277ed8ff407e\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.942621 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-sb\") pod \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.942688 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64qqv\" (UniqueName: \"kubernetes.io/projected/0a07157b-eade-4d9d-8d5d-277ed8ff407e-kube-api-access-64qqv\") pod \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.942777 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-config\") pod \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\" (UID: \"0a07157b-eade-4d9d-8d5d-277ed8ff407e\") " Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.946115 4911 scope.go:117] "RemoveContainer" containerID="10432df8cde42a2b0832ed1159a6effbdc492a6dbfe845505e76efe265ad78be" Jun 06 09:34:30 crc kubenswrapper[4911]: I0606 09:34:30.950511 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a07157b-eade-4d9d-8d5d-277ed8ff407e-kube-api-access-64qqv" (OuterVolumeSpecName: "kube-api-access-64qqv") pod "0a07157b-eade-4d9d-8d5d-277ed8ff407e" (UID: "0a07157b-eade-4d9d-8d5d-277ed8ff407e"). InnerVolumeSpecName "kube-api-access-64qqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.017963 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0a07157b-eade-4d9d-8d5d-277ed8ff407e" (UID: "0a07157b-eade-4d9d-8d5d-277ed8ff407e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.018501 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0a07157b-eade-4d9d-8d5d-277ed8ff407e" (UID: "0a07157b-eade-4d9d-8d5d-277ed8ff407e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.026308 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-config" (OuterVolumeSpecName: "config") pod "0a07157b-eade-4d9d-8d5d-277ed8ff407e" (UID: "0a07157b-eade-4d9d-8d5d-277ed8ff407e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.029016 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0a07157b-eade-4d9d-8d5d-277ed8ff407e" (UID: "0a07157b-eade-4d9d-8d5d-277ed8ff407e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.035015 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0a07157b-eade-4d9d-8d5d-277ed8ff407e" (UID: "0a07157b-eade-4d9d-8d5d-277ed8ff407e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.045012 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.045045 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.045057 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64qqv\" (UniqueName: \"kubernetes.io/projected/0a07157b-eade-4d9d-8d5d-277ed8ff407e-kube-api-access-64qqv\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.045068 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.045077 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.045087 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0a07157b-eade-4d9d-8d5d-277ed8ff407e-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.247372 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-686c8bcc79-fgqg8"] Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.255818 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-686c8bcc79-fgqg8"] Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.822963 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vb54s"] Jun 06 09:34:31 crc kubenswrapper[4911]: E0606 09:34:31.823553 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a07157b-eade-4d9d-8d5d-277ed8ff407e" containerName="init" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.823568 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a07157b-eade-4d9d-8d5d-277ed8ff407e" containerName="init" Jun 06 09:34:31 crc kubenswrapper[4911]: E0606 09:34:31.823589 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a07157b-eade-4d9d-8d5d-277ed8ff407e" containerName="dnsmasq-dns" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.823595 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a07157b-eade-4d9d-8d5d-277ed8ff407e" containerName="dnsmasq-dns" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.823790 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a07157b-eade-4d9d-8d5d-277ed8ff407e" 
containerName="dnsmasq-dns" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.825112 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.837692 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vb54s"] Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.962784 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a07157b-eade-4d9d-8d5d-277ed8ff407e" path="/var/lib/kubelet/pods/0a07157b-eade-4d9d-8d5d-277ed8ff407e/volumes" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.963398 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-utilities\") pod \"redhat-operators-vb54s\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.963468 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-catalog-content\") pod \"redhat-operators-vb54s\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:31 crc kubenswrapper[4911]: I0606 09:34:31.963742 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kxsb\" (UniqueName: \"kubernetes.io/projected/edf2126b-0123-4e59-8cde-740115908297-kube-api-access-7kxsb\") pod \"redhat-operators-vb54s\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.066126 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-utilities\") pod \"redhat-operators-vb54s\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.066241 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-catalog-content\") pod \"redhat-operators-vb54s\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.066383 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kxsb\" (UniqueName: \"kubernetes.io/projected/edf2126b-0123-4e59-8cde-740115908297-kube-api-access-7kxsb\") pod \"redhat-operators-vb54s\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.068201 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-utilities\") pod \"redhat-operators-vb54s\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.068347 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-catalog-content\") pod \"redhat-operators-vb54s\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.090669 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kxsb\" (UniqueName: \"kubernetes.io/projected/edf2126b-0123-4e59-8cde-740115908297-kube-api-access-7kxsb\") pod \"redhat-operators-vb54s\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.150915 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.658302 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vb54s"] Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.940557 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"27c4a155-3275-4a3e-9d1b-18cfb92f7d99","Type":"ContainerStarted","Data":"2a54d932d13f2d09fdf2c631fd903d5246c71d6c554d9144719d932dba2ace0d"} Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.940690 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.942899 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vb54s" event={"ID":"edf2126b-0123-4e59-8cde-740115908297","Type":"ContainerStarted","Data":"840aba7492c745d6a483df645b41d7c55ecb581defb2dbe996544e0c071283df"} Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.942934 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vb54s" event={"ID":"edf2126b-0123-4e59-8cde-740115908297","Type":"ContainerStarted","Data":"7fa643d809872b54f86f670c947692555c10b9e94859394e354a6927d0b35e82"} Jun 06 09:34:32 crc kubenswrapper[4911]: I0606 09:34:32.965705 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.646237847 podStartE2EDuration="6.965684765s" podCreationTimestamp="2025-06-06 09:34:26 +0000 UTC" firstStartedPulling="2025-06-06 09:34:28.5859906 +0000 UTC m=+1279.861416143" lastFinishedPulling="2025-06-06 09:34:31.905437518 +0000 UTC m=+1283.180863061" observedRunningTime="2025-06-06 09:34:32.960893361 +0000 UTC m=+1284.236318914" watchObservedRunningTime="2025-06-06 09:34:32.965684765 +0000 UTC m=+1284.241110308" Jun 06 09:34:33 crc kubenswrapper[4911]: I0606 09:34:33.954388 4911 generic.go:334] "Generic (PLEG): container finished" podID="edf2126b-0123-4e59-8cde-740115908297" containerID="840aba7492c745d6a483df645b41d7c55ecb581defb2dbe996544e0c071283df" exitCode=0 Jun 06 09:34:33 crc kubenswrapper[4911]: I0606 09:34:33.961775 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vb54s" event={"ID":"edf2126b-0123-4e59-8cde-740115908297","Type":"ContainerDied","Data":"840aba7492c745d6a483df645b41d7c55ecb581defb2dbe996544e0c071283df"} Jun 06 09:34:35 crc kubenswrapper[4911]: I0606 09:34:35.287871 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-686c8bcc79-fgqg8" podUID="0a07157b-eade-4d9d-8d5d-277ed8ff407e" containerName="dnsmasq-dns" probeResult="failure" 
output="dial tcp 10.217.0.197:5353: i/o timeout" Jun 06 09:34:35 crc kubenswrapper[4911]: I0606 09:34:35.973535 4911 generic.go:334] "Generic (PLEG): container finished" podID="27435fe9-7ac4-4fbb-9137-dabee568caf7" containerID="0f31b44c501e518dd703c0f9b16c9a1528de5ab578f7213db722bf3043893154" exitCode=0 Jun 06 09:34:35 crc kubenswrapper[4911]: I0606 09:34:35.973607 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jwx66" event={"ID":"27435fe9-7ac4-4fbb-9137-dabee568caf7","Type":"ContainerDied","Data":"0f31b44c501e518dd703c0f9b16c9a1528de5ab578f7213db722bf3043893154"} Jun 06 09:34:35 crc kubenswrapper[4911]: I0606 09:34:35.975696 4911 generic.go:334] "Generic (PLEG): container finished" podID="edf2126b-0123-4e59-8cde-740115908297" containerID="061c490b9866cacdc5057239bc6f5380db70d96bd87cac7728f170566a4a0d77" exitCode=0 Jun 06 09:34:35 crc kubenswrapper[4911]: I0606 09:34:35.975728 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vb54s" event={"ID":"edf2126b-0123-4e59-8cde-740115908297","Type":"ContainerDied","Data":"061c490b9866cacdc5057239bc6f5380db70d96bd87cac7728f170566a4a0d77"} Jun 06 09:34:37 crc kubenswrapper[4911]: I0606 09:34:36.999728 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vb54s" event={"ID":"edf2126b-0123-4e59-8cde-740115908297","Type":"ContainerStarted","Data":"c29ad5d19a6e8b242fc4dec91f7b51eb0a431cb33f5915fc234a2403e8547927"} Jun 06 09:34:37 crc kubenswrapper[4911]: I0606 09:34:37.027517 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vb54s" podStartSLOduration=3.582880367 podStartE2EDuration="6.02749011s" podCreationTimestamp="2025-06-06 09:34:31 +0000 UTC" firstStartedPulling="2025-06-06 09:34:33.957116706 +0000 UTC m=+1285.232542249" lastFinishedPulling="2025-06-06 09:34:36.401726449 +0000 UTC m=+1287.677151992" observedRunningTime="2025-06-06 09:34:37.019943175 +0000 UTC m=+1288.295368728" watchObservedRunningTime="2025-06-06 09:34:37.02749011 +0000 UTC m=+1288.302915653" Jun 06 09:34:37 crc kubenswrapper[4911]: I0606 09:34:37.301366 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jun 06 09:34:37 crc kubenswrapper[4911]: I0606 09:34:37.301426 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jun 06 09:34:37 crc kubenswrapper[4911]: I0606 09:34:37.878247 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:37 crc kubenswrapper[4911]: I0606 09:34:37.986418 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bt97d\" (UniqueName: \"kubernetes.io/projected/27435fe9-7ac4-4fbb-9137-dabee568caf7-kube-api-access-bt97d\") pod \"27435fe9-7ac4-4fbb-9137-dabee568caf7\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " Jun 06 09:34:37 crc kubenswrapper[4911]: I0606 09:34:37.986574 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-config-data\") pod \"27435fe9-7ac4-4fbb-9137-dabee568caf7\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " Jun 06 09:34:37 crc kubenswrapper[4911]: I0606 09:34:37.986604 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-scripts\") pod \"27435fe9-7ac4-4fbb-9137-dabee568caf7\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " Jun 06 09:34:37 crc kubenswrapper[4911]: I0606 09:34:37.986641 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-combined-ca-bundle\") pod \"27435fe9-7ac4-4fbb-9137-dabee568caf7\" (UID: \"27435fe9-7ac4-4fbb-9137-dabee568caf7\") " Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:37.993654 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-scripts" (OuterVolumeSpecName: "scripts") pod "27435fe9-7ac4-4fbb-9137-dabee568caf7" (UID: "27435fe9-7ac4-4fbb-9137-dabee568caf7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.009616 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27435fe9-7ac4-4fbb-9137-dabee568caf7-kube-api-access-bt97d" (OuterVolumeSpecName: "kube-api-access-bt97d") pod "27435fe9-7ac4-4fbb-9137-dabee568caf7" (UID: "27435fe9-7ac4-4fbb-9137-dabee568caf7"). InnerVolumeSpecName "kube-api-access-bt97d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.029545 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27435fe9-7ac4-4fbb-9137-dabee568caf7" (UID: "27435fe9-7ac4-4fbb-9137-dabee568caf7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.031388 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jwx66" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.033035 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-config-data" (OuterVolumeSpecName: "config-data") pod "27435fe9-7ac4-4fbb-9137-dabee568caf7" (UID: "27435fe9-7ac4-4fbb-9137-dabee568caf7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.089018 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bt97d\" (UniqueName: \"kubernetes.io/projected/27435fe9-7ac4-4fbb-9137-dabee568caf7-kube-api-access-bt97d\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.089063 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.089072 4911 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-scripts\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.089082 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27435fe9-7ac4-4fbb-9137-dabee568caf7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.114323 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jwx66" event={"ID":"27435fe9-7ac4-4fbb-9137-dabee568caf7","Type":"ContainerDied","Data":"7e05099dd871e24ba144d3fc41d2d823e8d2b54231cdca988b60834476e3214c"} Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.114374 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e05099dd871e24ba144d3fc41d2d823e8d2b54231cdca988b60834476e3214c" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.193424 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.193749 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerName="nova-api-log" containerID="cri-o://0c011a88ad0fe1a65100fc867f074267767fcf2587605063f55a22596b5e9e77" gracePeriod=30 Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.193823 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerName="nova-api-api" containerID="cri-o://137f7ca54f8970e0ba04f48a648d8f31d67652ce084d91c45406545fd199e0aa" gracePeriod=30 Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.200848 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": EOF" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.201133 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": EOF" Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.204974 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.205266 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="16c16194-2433-4e69-b40e-4a1f3d43795e" containerName="nova-scheduler-scheduler" 
containerID="cri-o://28466fac393f2dc6acbc3dfdd4e82c243a01ded0343b6c0ea47bdd77a696599a" gracePeriod=30 Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.231199 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.231640 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-log" containerID="cri-o://95f5747a9a073359a385c0831806e5f4fdebe51a44bb017f01fdef4c3490f928" gracePeriod=30 Jun 06 09:34:38 crc kubenswrapper[4911]: I0606 09:34:38.231945 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-metadata" containerID="cri-o://c8b536c9125d17e82a55ab08857cfcb8cd6e6fd8855ab8bf66c43a903e9f4108" gracePeriod=30 Jun 06 09:34:39 crc kubenswrapper[4911]: I0606 09:34:39.042585 4911 generic.go:334] "Generic (PLEG): container finished" podID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerID="95f5747a9a073359a385c0831806e5f4fdebe51a44bb017f01fdef4c3490f928" exitCode=143 Jun 06 09:34:39 crc kubenswrapper[4911]: I0606 09:34:39.042651 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6","Type":"ContainerDied","Data":"95f5747a9a073359a385c0831806e5f4fdebe51a44bb017f01fdef4c3490f928"} Jun 06 09:34:39 crc kubenswrapper[4911]: I0606 09:34:39.046218 4911 generic.go:334] "Generic (PLEG): container finished" podID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerID="0c011a88ad0fe1a65100fc867f074267767fcf2587605063f55a22596b5e9e77" exitCode=143 Jun 06 09:34:39 crc kubenswrapper[4911]: I0606 09:34:39.046277 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1","Type":"ContainerDied","Data":"0c011a88ad0fe1a65100fc867f074267767fcf2587605063f55a22596b5e9e77"} Jun 06 09:34:41 crc kubenswrapper[4911]: I0606 09:34:41.362467 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": read tcp 10.217.0.2:48518->10.217.0.201:8775: read: connection reset by peer" Jun 06 09:34:41 crc kubenswrapper[4911]: I0606 09:34:41.363432 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": read tcp 10.217.0.2:48530->10.217.0.201:8775: read: connection reset by peer" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.076776 4911 generic.go:334] "Generic (PLEG): container finished" podID="16c16194-2433-4e69-b40e-4a1f3d43795e" containerID="28466fac393f2dc6acbc3dfdd4e82c243a01ded0343b6c0ea47bdd77a696599a" exitCode=0 Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.076879 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"16c16194-2433-4e69-b40e-4a1f3d43795e","Type":"ContainerDied","Data":"28466fac393f2dc6acbc3dfdd4e82c243a01ded0343b6c0ea47bdd77a696599a"} Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.078987 4911 generic.go:334] "Generic (PLEG): container finished" podID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" 
containerID="c8b536c9125d17e82a55ab08857cfcb8cd6e6fd8855ab8bf66c43a903e9f4108" exitCode=0 Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.079062 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6","Type":"ContainerDied","Data":"c8b536c9125d17e82a55ab08857cfcb8cd6e6fd8855ab8bf66c43a903e9f4108"} Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.151374 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.151444 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.208406 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.507823 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.682469 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-nova-metadata-tls-certs\") pod \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.682620 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-config-data\") pod \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.682752 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-logs\") pod \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.682877 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phc9g\" (UniqueName: \"kubernetes.io/projected/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-kube-api-access-phc9g\") pod \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.682929 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-combined-ca-bundle\") pod \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\" (UID: \"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6\") " Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.683909 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-logs" (OuterVolumeSpecName: "logs") pod "c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" (UID: "c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.689432 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-kube-api-access-phc9g" (OuterVolumeSpecName: "kube-api-access-phc9g") pod "c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" (UID: "c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6"). InnerVolumeSpecName "kube-api-access-phc9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.710824 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" (UID: "c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.712492 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-config-data" (OuterVolumeSpecName: "config-data") pod "c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" (UID: "c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.744155 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" (UID: "c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.785856 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.785898 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phc9g\" (UniqueName: \"kubernetes.io/projected/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-kube-api-access-phc9g\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.785915 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.785929 4911 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.785940 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:42 crc kubenswrapper[4911]: I0606 09:34:42.900055 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.091407 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4vtb\" (UniqueName: \"kubernetes.io/projected/16c16194-2433-4e69-b40e-4a1f3d43795e-kube-api-access-d4vtb\") pod \"16c16194-2433-4e69-b40e-4a1f3d43795e\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.091728 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.091976 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-combined-ca-bundle\") pod \"16c16194-2433-4e69-b40e-4a1f3d43795e\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.092033 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-config-data\") pod \"16c16194-2433-4e69-b40e-4a1f3d43795e\" (UID: \"16c16194-2433-4e69-b40e-4a1f3d43795e\") " Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.091617 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"16c16194-2433-4e69-b40e-4a1f3d43795e","Type":"ContainerDied","Data":"6f9ed28323d7241bc758509049f249f183f3bf9bbe20afd4a6fee18cba5fe304"} Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.092270 4911 scope.go:117] "RemoveContainer" containerID="28466fac393f2dc6acbc3dfdd4e82c243a01ded0343b6c0ea47bdd77a696599a" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.095031 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16c16194-2433-4e69-b40e-4a1f3d43795e-kube-api-access-d4vtb" (OuterVolumeSpecName: "kube-api-access-d4vtb") pod "16c16194-2433-4e69-b40e-4a1f3d43795e" (UID: "16c16194-2433-4e69-b40e-4a1f3d43795e"). InnerVolumeSpecName "kube-api-access-d4vtb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.098552 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6","Type":"ContainerDied","Data":"31efb5722b2557153201180dc65cee56fb4957fd1580b254bc4497847c8eb88c"} Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.098569 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.116743 4911 scope.go:117] "RemoveContainer" containerID="c8b536c9125d17e82a55ab08857cfcb8cd6e6fd8855ab8bf66c43a903e9f4108" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.129014 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-config-data" (OuterVolumeSpecName: "config-data") pod "16c16194-2433-4e69-b40e-4a1f3d43795e" (UID: "16c16194-2433-4e69-b40e-4a1f3d43795e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.147336 4911 scope.go:117] "RemoveContainer" containerID="95f5747a9a073359a385c0831806e5f4fdebe51a44bb017f01fdef4c3490f928" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.149060 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16c16194-2433-4e69-b40e-4a1f3d43795e" (UID: "16c16194-2433-4e69-b40e-4a1f3d43795e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.149622 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.168485 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.178116 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.185851 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:34:43 crc kubenswrapper[4911]: E0606 09:34:43.186501 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27435fe9-7ac4-4fbb-9137-dabee568caf7" containerName="nova-manage" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.186529 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="27435fe9-7ac4-4fbb-9137-dabee568caf7" containerName="nova-manage" Jun 06 09:34:43 crc kubenswrapper[4911]: E0606 09:34:43.186587 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-metadata" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.186603 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-metadata" Jun 06 09:34:43 crc kubenswrapper[4911]: E0606 09:34:43.186616 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16c16194-2433-4e69-b40e-4a1f3d43795e" containerName="nova-scheduler-scheduler" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.186625 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="16c16194-2433-4e69-b40e-4a1f3d43795e" containerName="nova-scheduler-scheduler" Jun 06 09:34:43 crc kubenswrapper[4911]: E0606 09:34:43.186664 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-log" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.186674 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-log" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.186941 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-metadata" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.186959 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="16c16194-2433-4e69-b40e-4a1f3d43795e" containerName="nova-scheduler-scheduler" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.186974 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="27435fe9-7ac4-4fbb-9137-dabee568caf7" containerName="nova-manage" 
Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.186992 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" containerName="nova-metadata-log" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.192533 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.208507 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.208588 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4vtb\" (UniqueName: \"kubernetes.io/projected/16c16194-2433-4e69-b40e-4a1f3d43795e-kube-api-access-d4vtb\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.208643 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c16194-2433-4e69-b40e-4a1f3d43795e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.216215 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.222684 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.231187 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.261995 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vb54s"] Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.310238 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kngld\" (UniqueName: \"kubernetes.io/projected/4ac4797d-5355-4252-85b5-2c3be041d3ed-kube-api-access-kngld\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.310293 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac4797d-5355-4252-85b5-2c3be041d3ed-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.310324 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ac4797d-5355-4252-85b5-2c3be041d3ed-config-data\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.310399 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ac4797d-5355-4252-85b5-2c3be041d3ed-logs\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.312565 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ac4797d-5355-4252-85b5-2c3be041d3ed-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.418947 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kngld\" (UniqueName: \"kubernetes.io/projected/4ac4797d-5355-4252-85b5-2c3be041d3ed-kube-api-access-kngld\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.419156 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac4797d-5355-4252-85b5-2c3be041d3ed-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.419571 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ac4797d-5355-4252-85b5-2c3be041d3ed-config-data\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.419605 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ac4797d-5355-4252-85b5-2c3be041d3ed-logs\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.420711 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ac4797d-5355-4252-85b5-2c3be041d3ed-logs\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.421166 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ac4797d-5355-4252-85b5-2c3be041d3ed-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.423824 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ac4797d-5355-4252-85b5-2c3be041d3ed-config-data\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.423910 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac4797d-5355-4252-85b5-2c3be041d3ed-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.424448 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ac4797d-5355-4252-85b5-2c3be041d3ed-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.434436 4911 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.443025 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kngld\" (UniqueName: \"kubernetes.io/projected/4ac4797d-5355-4252-85b5-2c3be041d3ed-kube-api-access-kngld\") pod \"nova-metadata-0\" (UID: \"4ac4797d-5355-4252-85b5-2c3be041d3ed\") " pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.455059 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.466767 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.469644 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.474498 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.476743 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.512734 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.523071 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shcpk\" (UniqueName: \"kubernetes.io/projected/f6b94193-31cf-475f-82b9-9229341065d8-kube-api-access-shcpk\") pod \"nova-scheduler-0\" (UID: \"f6b94193-31cf-475f-82b9-9229341065d8\") " pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.523344 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6b94193-31cf-475f-82b9-9229341065d8-config-data\") pod \"nova-scheduler-0\" (UID: \"f6b94193-31cf-475f-82b9-9229341065d8\") " pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.523500 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6b94193-31cf-475f-82b9-9229341065d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f6b94193-31cf-475f-82b9-9229341065d8\") " pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.625764 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shcpk\" (UniqueName: \"kubernetes.io/projected/f6b94193-31cf-475f-82b9-9229341065d8-kube-api-access-shcpk\") pod \"nova-scheduler-0\" (UID: \"f6b94193-31cf-475f-82b9-9229341065d8\") " pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.626282 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6b94193-31cf-475f-82b9-9229341065d8-config-data\") pod \"nova-scheduler-0\" (UID: \"f6b94193-31cf-475f-82b9-9229341065d8\") " pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.626394 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f6b94193-31cf-475f-82b9-9229341065d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f6b94193-31cf-475f-82b9-9229341065d8\") " pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.635056 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6b94193-31cf-475f-82b9-9229341065d8-config-data\") pod \"nova-scheduler-0\" (UID: \"f6b94193-31cf-475f-82b9-9229341065d8\") " pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.635116 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6b94193-31cf-475f-82b9-9229341065d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f6b94193-31cf-475f-82b9-9229341065d8\") " pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.644527 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shcpk\" (UniqueName: \"kubernetes.io/projected/f6b94193-31cf-475f-82b9-9229341065d8-kube-api-access-shcpk\") pod \"nova-scheduler-0\" (UID: \"f6b94193-31cf-475f-82b9-9229341065d8\") " pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.831081 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.964938 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16c16194-2433-4e69-b40e-4a1f3d43795e" path="/var/lib/kubelet/pods/16c16194-2433-4e69-b40e-4a1f3d43795e/volumes" Jun 06 09:34:43 crc kubenswrapper[4911]: I0606 09:34:43.969207 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6" path="/var/lib/kubelet/pods/c47e2eda-3e8b-4a88-af7a-c0ea0e27b3d6/volumes" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.113223 4911 generic.go:334] "Generic (PLEG): container finished" podID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerID="137f7ca54f8970e0ba04f48a648d8f31d67652ce084d91c45406545fd199e0aa" exitCode=0 Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.113369 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1","Type":"ContainerDied","Data":"137f7ca54f8970e0ba04f48a648d8f31d67652ce084d91c45406545fd199e0aa"} Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.199866 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jun 06 09:34:44 crc kubenswrapper[4911]: W0606 09:34:44.341516 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ac4797d_5355_4252_85b5_2c3be041d3ed.slice/crio-8d9ffae0ab53306a2bc91fc20dc8208599150ba09c80bba17d2e557e9da4826d WatchSource:0}: Error finding container 8d9ffae0ab53306a2bc91fc20dc8208599150ba09c80bba17d2e557e9da4826d: Status 404 returned error can't find the container with id 8d9ffae0ab53306a2bc91fc20dc8208599150ba09c80bba17d2e557e9da4826d Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.662525 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jun 06 09:34:44 crc kubenswrapper[4911]: W0606 09:34:44.669453 4911 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf6b94193_31cf_475f_82b9_9229341065d8.slice/crio-28440d355e2227f1e14da2346d866fb168286b96733bd02c417cae4a063659c7 WatchSource:0}: Error finding container 28440d355e2227f1e14da2346d866fb168286b96733bd02c417cae4a063659c7: Status 404 returned error can't find the container with id 28440d355e2227f1e14da2346d866fb168286b96733bd02c417cae4a063659c7 Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.775800 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.855236 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-combined-ca-bundle\") pod \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.855350 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-internal-tls-certs\") pod \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.855461 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-logs\") pod \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.855580 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfrmm\" (UniqueName: \"kubernetes.io/projected/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-kube-api-access-dfrmm\") pod \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.855623 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-config-data\") pod \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.855676 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-public-tls-certs\") pod \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\" (UID: \"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1\") " Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.860317 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-logs" (OuterVolumeSpecName: "logs") pod "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" (UID: "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.863353 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-kube-api-access-dfrmm" (OuterVolumeSpecName: "kube-api-access-dfrmm") pod "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" (UID: "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1"). InnerVolumeSpecName "kube-api-access-dfrmm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.889159 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-config-data" (OuterVolumeSpecName: "config-data") pod "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" (UID: "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.889228 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" (UID: "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.912329 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" (UID: "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.914540 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" (UID: "7b9e138f-88b0-4428-bef5-c8d5cb61bbd1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.958107 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.958151 4911 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.958167 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.958182 4911 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.958194 4911 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-logs\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:44 crc kubenswrapper[4911]: I0606 09:34:44.958205 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfrmm\" (UniqueName: \"kubernetes.io/projected/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1-kube-api-access-dfrmm\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.127682 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-scheduler-0" event={"ID":"f6b94193-31cf-475f-82b9-9229341065d8","Type":"ContainerStarted","Data":"e260efa796030f9e8a164278d5aff16ed9cbd33481e97ce813a49a5e8b522313"} Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.127730 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f6b94193-31cf-475f-82b9-9229341065d8","Type":"ContainerStarted","Data":"28440d355e2227f1e14da2346d866fb168286b96733bd02c417cae4a063659c7"} Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.130630 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b9e138f-88b0-4428-bef5-c8d5cb61bbd1","Type":"ContainerDied","Data":"d35125f59375196143622813a71e6d6707864812d0a6443211b2b35f9270c445"} Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.130675 4911 scope.go:117] "RemoveContainer" containerID="137f7ca54f8970e0ba04f48a648d8f31d67652ce084d91c45406545fd199e0aa" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.130783 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.136065 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ac4797d-5355-4252-85b5-2c3be041d3ed","Type":"ContainerStarted","Data":"a4d7e39f359982346185c6d9008e54ebb5e43508baf2ef191e6179f2938bbfd8"} Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.136142 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vb54s" podUID="edf2126b-0123-4e59-8cde-740115908297" containerName="registry-server" containerID="cri-o://c29ad5d19a6e8b242fc4dec91f7b51eb0a431cb33f5915fc234a2403e8547927" gracePeriod=2 Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.136155 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ac4797d-5355-4252-85b5-2c3be041d3ed","Type":"ContainerStarted","Data":"7bdb5dce26f2f00eacad5719d78e76f8965e8084b7758ceb36e40b2deb832942"} Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.136203 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ac4797d-5355-4252-85b5-2c3be041d3ed","Type":"ContainerStarted","Data":"8d9ffae0ab53306a2bc91fc20dc8208599150ba09c80bba17d2e557e9da4826d"} Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.152442 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.152388342 podStartE2EDuration="2.152388342s" podCreationTimestamp="2025-06-06 09:34:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:34:45.146775437 +0000 UTC m=+1296.422200980" watchObservedRunningTime="2025-06-06 09:34:45.152388342 +0000 UTC m=+1296.427813885" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.161631 4911 scope.go:117] "RemoveContainer" containerID="0c011a88ad0fe1a65100fc867f074267767fcf2587605063f55a22596b5e9e77" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.172878 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.17285781 podStartE2EDuration="2.17285781s" podCreationTimestamp="2025-06-06 09:34:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-06-06 09:34:45.171836173 +0000 UTC m=+1296.447261736" watchObservedRunningTime="2025-06-06 09:34:45.17285781 +0000 UTC m=+1296.448283353" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.204384 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.220307 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.230710 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:45 crc kubenswrapper[4911]: E0606 09:34:45.231264 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerName="nova-api-api" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.231287 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerName="nova-api-api" Jun 06 09:34:45 crc kubenswrapper[4911]: E0606 09:34:45.231305 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerName="nova-api-log" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.231314 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerName="nova-api-log" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.231552 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerName="nova-api-log" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.231578 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" containerName="nova-api-api" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.232922 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.235889 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.236073 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.236260 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.239870 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.263440 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.263614 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-logs\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.263641 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-public-tls-certs\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.263663 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-config-data\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.263759 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.263803 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-752gl\" (UniqueName: \"kubernetes.io/projected/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-kube-api-access-752gl\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.366437 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-logs\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.366741 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-public-tls-certs\") pod \"nova-api-0\" (UID: 
\"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.366765 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-config-data\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.366830 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.366850 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-752gl\" (UniqueName: \"kubernetes.io/projected/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-kube-api-access-752gl\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.366930 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.371793 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-logs\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.373126 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-internal-tls-certs\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.373149 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-config-data\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.373125 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.373765 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-public-tls-certs\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.383923 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-752gl\" (UniqueName: \"kubernetes.io/projected/b2ccf2c7-2f1c-48d9-812c-e148c3548e3a-kube-api-access-752gl\") pod \"nova-api-0\" (UID: \"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a\") " 
pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.619287 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jun 06 09:34:45 crc kubenswrapper[4911]: I0606 09:34:45.959689 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b9e138f-88b0-4428-bef5-c8d5cb61bbd1" path="/var/lib/kubelet/pods/7b9e138f-88b0-4428-bef5-c8d5cb61bbd1/volumes" Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.157497 4911 generic.go:334] "Generic (PLEG): container finished" podID="edf2126b-0123-4e59-8cde-740115908297" containerID="c29ad5d19a6e8b242fc4dec91f7b51eb0a431cb33f5915fc234a2403e8547927" exitCode=0 Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.157602 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vb54s" event={"ID":"edf2126b-0123-4e59-8cde-740115908297","Type":"ContainerDied","Data":"c29ad5d19a6e8b242fc4dec91f7b51eb0a431cb33f5915fc234a2403e8547927"} Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.209300 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.377067 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.388404 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kxsb\" (UniqueName: \"kubernetes.io/projected/edf2126b-0123-4e59-8cde-740115908297-kube-api-access-7kxsb\") pod \"edf2126b-0123-4e59-8cde-740115908297\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.388540 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-utilities\") pod \"edf2126b-0123-4e59-8cde-740115908297\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.388586 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-catalog-content\") pod \"edf2126b-0123-4e59-8cde-740115908297\" (UID: \"edf2126b-0123-4e59-8cde-740115908297\") " Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.389683 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-utilities" (OuterVolumeSpecName: "utilities") pod "edf2126b-0123-4e59-8cde-740115908297" (UID: "edf2126b-0123-4e59-8cde-740115908297"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.453513 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "edf2126b-0123-4e59-8cde-740115908297" (UID: "edf2126b-0123-4e59-8cde-740115908297"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.465905 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edf2126b-0123-4e59-8cde-740115908297-kube-api-access-7kxsb" (OuterVolumeSpecName: "kube-api-access-7kxsb") pod "edf2126b-0123-4e59-8cde-740115908297" (UID: "edf2126b-0123-4e59-8cde-740115908297"). InnerVolumeSpecName "kube-api-access-7kxsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.491267 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.491308 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edf2126b-0123-4e59-8cde-740115908297-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:46 crc kubenswrapper[4911]: I0606 09:34:46.491324 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kxsb\" (UniqueName: \"kubernetes.io/projected/edf2126b-0123-4e59-8cde-740115908297-kube-api-access-7kxsb\") on node \"crc\" DevicePath \"\"" Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.170826 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vb54s" event={"ID":"edf2126b-0123-4e59-8cde-740115908297","Type":"ContainerDied","Data":"7fa643d809872b54f86f670c947692555c10b9e94859394e354a6927d0b35e82"} Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.171161 4911 scope.go:117] "RemoveContainer" containerID="c29ad5d19a6e8b242fc4dec91f7b51eb0a431cb33f5915fc234a2403e8547927" Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.171121 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vb54s" Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.175870 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a","Type":"ContainerStarted","Data":"a723389d11ba243864265bfd325083888fa359f6b015c0dec7ded033d537bc74"} Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.176067 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a","Type":"ContainerStarted","Data":"fb8f96b9bbdf03cbffbfc2ef15c05fa90e1fd2fb259db97b9c63bfd656ae0ece"} Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.176082 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b2ccf2c7-2f1c-48d9-812c-e148c3548e3a","Type":"ContainerStarted","Data":"6eeb13051d133869e86abaa28b6af820ea6cf6eb5f6fbda5030295e7b8ea3538"} Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.221227 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.221200863 podStartE2EDuration="2.221200863s" podCreationTimestamp="2025-06-06 09:34:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:34:47.19551021 +0000 UTC m=+1298.470935773" watchObservedRunningTime="2025-06-06 09:34:47.221200863 +0000 UTC m=+1298.496626406" Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.226328 4911 scope.go:117] "RemoveContainer" containerID="061c490b9866cacdc5057239bc6f5380db70d96bd87cac7728f170566a4a0d77" Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.233866 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vb54s"] Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.246826 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vb54s"] Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.254078 4911 scope.go:117] "RemoveContainer" containerID="840aba7492c745d6a483df645b41d7c55ecb581defb2dbe996544e0c071283df" Jun 06 09:34:47 crc kubenswrapper[4911]: I0606 09:34:47.959316 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edf2126b-0123-4e59-8cde-740115908297" path="/var/lib/kubelet/pods/edf2126b-0123-4e59-8cde-740115908297/volumes" Jun 06 09:34:48 crc kubenswrapper[4911]: I0606 09:34:48.513081 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jun 06 09:34:48 crc kubenswrapper[4911]: I0606 09:34:48.513197 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jun 06 09:34:48 crc kubenswrapper[4911]: I0606 09:34:48.831914 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jun 06 09:34:53 crc kubenswrapper[4911]: I0606 09:34:53.513797 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jun 06 09:34:53 crc kubenswrapper[4911]: I0606 09:34:53.514396 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jun 06 09:34:53 crc kubenswrapper[4911]: I0606 09:34:53.832563 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jun 06 09:34:53 crc kubenswrapper[4911]: I0606 09:34:53.861676 4911 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jun 06 09:34:54 crc kubenswrapper[4911]: I0606 09:34:54.300288 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:34:54 crc kubenswrapper[4911]: I0606 09:34:54.300373 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:34:54 crc kubenswrapper[4911]: I0606 09:34:54.317051 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jun 06 09:34:54 crc kubenswrapper[4911]: I0606 09:34:54.526322 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4ac4797d-5355-4252-85b5-2c3be041d3ed" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.210:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jun 06 09:34:54 crc kubenswrapper[4911]: I0606 09:34:54.526360 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4ac4797d-5355-4252-85b5-2c3be041d3ed" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.210:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jun 06 09:34:55 crc kubenswrapper[4911]: I0606 09:34:55.620234 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jun 06 09:34:55 crc kubenswrapper[4911]: I0606 09:34:55.620591 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jun 06 09:34:56 crc kubenswrapper[4911]: I0606 09:34:56.633294 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b2ccf2c7-2f1c-48d9-812c-e148c3548e3a" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.212:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jun 06 09:34:56 crc kubenswrapper[4911]: I0606 09:34:56.633336 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b2ccf2c7-2f1c-48d9-812c-e148c3548e3a" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.212:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jun 06 09:34:57 crc kubenswrapper[4911]: I0606 09:34:57.381224 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.253524 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-67q9g"] Jun 06 09:35:02 crc kubenswrapper[4911]: E0606 09:35:02.255272 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edf2126b-0123-4e59-8cde-740115908297" containerName="extract-utilities" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.255291 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="edf2126b-0123-4e59-8cde-740115908297" containerName="extract-utilities" Jun 06 
09:35:02 crc kubenswrapper[4911]: E0606 09:35:02.255321 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edf2126b-0123-4e59-8cde-740115908297" containerName="registry-server" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.255328 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="edf2126b-0123-4e59-8cde-740115908297" containerName="registry-server" Jun 06 09:35:02 crc kubenswrapper[4911]: E0606 09:35:02.255342 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edf2126b-0123-4e59-8cde-740115908297" containerName="extract-content" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.255348 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="edf2126b-0123-4e59-8cde-740115908297" containerName="extract-content" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.255519 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="edf2126b-0123-4e59-8cde-740115908297" containerName="registry-server" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.256373 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-67q9g" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.325196 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-host\") pod \"crc-debug-67q9g\" (UID: \"cf4ff90d-165d-4c1f-827c-905bb5fd7cac\") " pod="openstack/crc-debug-67q9g" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.325316 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78pd7\" (UniqueName: \"kubernetes.io/projected/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-kube-api-access-78pd7\") pod \"crc-debug-67q9g\" (UID: \"cf4ff90d-165d-4c1f-827c-905bb5fd7cac\") " pod="openstack/crc-debug-67q9g" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.427529 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-host\") pod \"crc-debug-67q9g\" (UID: \"cf4ff90d-165d-4c1f-827c-905bb5fd7cac\") " pod="openstack/crc-debug-67q9g" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.427674 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78pd7\" (UniqueName: \"kubernetes.io/projected/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-kube-api-access-78pd7\") pod \"crc-debug-67q9g\" (UID: \"cf4ff90d-165d-4c1f-827c-905bb5fd7cac\") " pod="openstack/crc-debug-67q9g" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.427927 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-host\") pod \"crc-debug-67q9g\" (UID: \"cf4ff90d-165d-4c1f-827c-905bb5fd7cac\") " pod="openstack/crc-debug-67q9g" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.448686 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78pd7\" (UniqueName: \"kubernetes.io/projected/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-kube-api-access-78pd7\") pod \"crc-debug-67q9g\" (UID: \"cf4ff90d-165d-4c1f-827c-905bb5fd7cac\") " pod="openstack/crc-debug-67q9g" Jun 06 09:35:02 crc kubenswrapper[4911]: I0606 09:35:02.577915 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-67q9g" Jun 06 09:35:02 crc kubenswrapper[4911]: W0606 09:35:02.617391 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf4ff90d_165d_4c1f_827c_905bb5fd7cac.slice/crio-d8a0d00c2c24383a7c2902e82f34e7495e0a903d476ac41aed7b7f3cfb66d747 WatchSource:0}: Error finding container d8a0d00c2c24383a7c2902e82f34e7495e0a903d476ac41aed7b7f3cfb66d747: Status 404 returned error can't find the container with id d8a0d00c2c24383a7c2902e82f34e7495e0a903d476ac41aed7b7f3cfb66d747 Jun 06 09:35:03 crc kubenswrapper[4911]: I0606 09:35:03.379243 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-67q9g" event={"ID":"cf4ff90d-165d-4c1f-827c-905bb5fd7cac","Type":"ContainerStarted","Data":"d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969"} Jun 06 09:35:03 crc kubenswrapper[4911]: I0606 09:35:03.379588 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-67q9g" event={"ID":"cf4ff90d-165d-4c1f-827c-905bb5fd7cac","Type":"ContainerStarted","Data":"d8a0d00c2c24383a7c2902e82f34e7495e0a903d476ac41aed7b7f3cfb66d747"} Jun 06 09:35:03 crc kubenswrapper[4911]: I0606 09:35:03.397944 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-67q9g" podStartSLOduration=1.397928444 podStartE2EDuration="1.397928444s" podCreationTimestamp="2025-06-06 09:35:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:35:03.396073296 +0000 UTC m=+1314.671498849" watchObservedRunningTime="2025-06-06 09:35:03.397928444 +0000 UTC m=+1314.673353987" Jun 06 09:35:03 crc kubenswrapper[4911]: I0606 09:35:03.519840 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jun 06 09:35:03 crc kubenswrapper[4911]: I0606 09:35:03.519913 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jun 06 09:35:03 crc kubenswrapper[4911]: I0606 09:35:03.524723 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jun 06 09:35:03 crc kubenswrapper[4911]: I0606 09:35:03.526644 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jun 06 09:35:05 crc kubenswrapper[4911]: I0606 09:35:05.628564 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jun 06 09:35:05 crc kubenswrapper[4911]: I0606 09:35:05.629619 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jun 06 09:35:05 crc kubenswrapper[4911]: I0606 09:35:05.631023 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jun 06 09:35:05 crc kubenswrapper[4911]: I0606 09:35:05.637219 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jun 06 09:35:06 crc kubenswrapper[4911]: I0606 09:35:06.408028 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jun 06 09:35:06 crc kubenswrapper[4911]: I0606 09:35:06.415554 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.305635 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/crc-debug-67q9g"] Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.306305 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-67q9g" podUID="cf4ff90d-165d-4c1f-827c-905bb5fd7cac" containerName="container-00" containerID="cri-o://d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969" gracePeriod=2 Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.319194 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-67q9g"] Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.416797 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-67q9g" Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.475207 4911 generic.go:334] "Generic (PLEG): container finished" podID="cf4ff90d-165d-4c1f-827c-905bb5fd7cac" containerID="d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969" exitCode=0 Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.475265 4911 scope.go:117] "RemoveContainer" containerID="d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969" Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.475266 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-67q9g" Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.502187 4911 scope.go:117] "RemoveContainer" containerID="d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969" Jun 06 09:35:13 crc kubenswrapper[4911]: E0606 09:35:13.503024 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969\": container with ID starting with d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969 not found: ID does not exist" containerID="d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969" Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.503108 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969"} err="failed to get container status \"d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969\": rpc error: code = NotFound desc = could not find container \"d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969\": container with ID starting with d47a697688332e3854881d2fc7c54876694c849fe36d39c0cc0161f5b44a4969 not found: ID does not exist" Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.572865 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-host\") pod \"cf4ff90d-165d-4c1f-827c-905bb5fd7cac\" (UID: \"cf4ff90d-165d-4c1f-827c-905bb5fd7cac\") " Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.572994 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-host" (OuterVolumeSpecName: "host") pod "cf4ff90d-165d-4c1f-827c-905bb5fd7cac" (UID: "cf4ff90d-165d-4c1f-827c-905bb5fd7cac"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.573026 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78pd7\" (UniqueName: \"kubernetes.io/projected/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-kube-api-access-78pd7\") pod \"cf4ff90d-165d-4c1f-827c-905bb5fd7cac\" (UID: \"cf4ff90d-165d-4c1f-827c-905bb5fd7cac\") " Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.573669 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.580542 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-kube-api-access-78pd7" (OuterVolumeSpecName: "kube-api-access-78pd7") pod "cf4ff90d-165d-4c1f-827c-905bb5fd7cac" (UID: "cf4ff90d-165d-4c1f-827c-905bb5fd7cac"). InnerVolumeSpecName "kube-api-access-78pd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.675725 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78pd7\" (UniqueName: \"kubernetes.io/projected/cf4ff90d-165d-4c1f-827c-905bb5fd7cac-kube-api-access-78pd7\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:13 crc kubenswrapper[4911]: I0606 09:35:13.982287 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf4ff90d-165d-4c1f-827c-905bb5fd7cac" path="/var/lib/kubelet/pods/cf4ff90d-165d-4c1f-827c-905bb5fd7cac/volumes" Jun 06 09:35:14 crc kubenswrapper[4911]: I0606 09:35:14.561208 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jun 06 09:35:15 crc kubenswrapper[4911]: I0606 09:35:15.493952 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jun 06 09:35:18 crc kubenswrapper[4911]: I0606 09:35:18.607741 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="abf307d7-9aa9-4d2f-9943-e3a085568096" containerName="rabbitmq" containerID="cri-o://db8390c7890d5f767239b22d5379e46e15bf8a2ac339e7d9caddb2d672658514" gracePeriod=604796 Jun 06 09:35:19 crc kubenswrapper[4911]: I0606 09:35:19.692025 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" containerName="rabbitmq" containerID="cri-o://c1b603770fd0771eb755ba8401e1fc4a29c04489ed1107eae7ebc65146557e23" gracePeriod=604796 Jun 06 09:35:22 crc kubenswrapper[4911]: I0606 09:35:22.347625 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="abf307d7-9aa9-4d2f-9943-e3a085568096" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.97:5671: connect: connection refused" Jun 06 09:35:22 crc kubenswrapper[4911]: I0606 09:35:22.392678 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused" Jun 06 09:35:24 crc kubenswrapper[4911]: I0606 09:35:24.300839 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:35:24 crc kubenswrapper[4911]: I0606 09:35:24.301226 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:35:24 crc kubenswrapper[4911]: I0606 09:35:24.301270 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:35:24 crc kubenswrapper[4911]: I0606 09:35:24.302058 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b85be7d4abe7e6126686c6349bcc9d33572e190ba6f10a48055108480e2a3749"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 09:35:24 crc kubenswrapper[4911]: I0606 09:35:24.302141 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://b85be7d4abe7e6126686c6349bcc9d33572e190ba6f10a48055108480e2a3749" gracePeriod=600 Jun 06 09:35:24 crc kubenswrapper[4911]: I0606 09:35:24.583313 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="b85be7d4abe7e6126686c6349bcc9d33572e190ba6f10a48055108480e2a3749" exitCode=0 Jun 06 09:35:24 crc kubenswrapper[4911]: I0606 09:35:24.583397 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"b85be7d4abe7e6126686c6349bcc9d33572e190ba6f10a48055108480e2a3749"} Jun 06 09:35:24 crc kubenswrapper[4911]: I0606 09:35:24.583865 4911 scope.go:117] "RemoveContainer" containerID="6b9847c4a123626a7be96b480b8b31ed0796d77df359c9b4543cc2db6085b4ee" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.595299 4911 generic.go:334] "Generic (PLEG): container finished" podID="abf307d7-9aa9-4d2f-9943-e3a085568096" containerID="db8390c7890d5f767239b22d5379e46e15bf8a2ac339e7d9caddb2d672658514" exitCode=0 Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.595373 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abf307d7-9aa9-4d2f-9943-e3a085568096","Type":"ContainerDied","Data":"db8390c7890d5f767239b22d5379e46e15bf8a2ac339e7d9caddb2d672658514"} Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.598579 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1"} Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.719408 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.850127 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44vbj\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-kube-api-access-44vbj\") pod \"abf307d7-9aa9-4d2f-9943-e3a085568096\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.850250 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"abf307d7-9aa9-4d2f-9943-e3a085568096\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.850364 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-plugins-conf\") pod \"abf307d7-9aa9-4d2f-9943-e3a085568096\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.850433 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-tls\") pod \"abf307d7-9aa9-4d2f-9943-e3a085568096\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.850493 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-config-data\") pod \"abf307d7-9aa9-4d2f-9943-e3a085568096\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.850523 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/abf307d7-9aa9-4d2f-9943-e3a085568096-erlang-cookie-secret\") pod \"abf307d7-9aa9-4d2f-9943-e3a085568096\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.850572 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-server-conf\") pod \"abf307d7-9aa9-4d2f-9943-e3a085568096\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.850619 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-confd\") pod \"abf307d7-9aa9-4d2f-9943-e3a085568096\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.850684 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/abf307d7-9aa9-4d2f-9943-e3a085568096-pod-info\") pod \"abf307d7-9aa9-4d2f-9943-e3a085568096\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.850888 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-erlang-cookie\") pod \"abf307d7-9aa9-4d2f-9943-e3a085568096\" (UID: 
\"abf307d7-9aa9-4d2f-9943-e3a085568096\") " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.850925 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-plugins\") pod \"abf307d7-9aa9-4d2f-9943-e3a085568096\" (UID: \"abf307d7-9aa9-4d2f-9943-e3a085568096\") " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.851807 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "abf307d7-9aa9-4d2f-9943-e3a085568096" (UID: "abf307d7-9aa9-4d2f-9943-e3a085568096"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.852439 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "abf307d7-9aa9-4d2f-9943-e3a085568096" (UID: "abf307d7-9aa9-4d2f-9943-e3a085568096"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.856973 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "abf307d7-9aa9-4d2f-9943-e3a085568096" (UID: "abf307d7-9aa9-4d2f-9943-e3a085568096"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.857894 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-kube-api-access-44vbj" (OuterVolumeSpecName: "kube-api-access-44vbj") pod "abf307d7-9aa9-4d2f-9943-e3a085568096" (UID: "abf307d7-9aa9-4d2f-9943-e3a085568096"). InnerVolumeSpecName "kube-api-access-44vbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.859719 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abf307d7-9aa9-4d2f-9943-e3a085568096-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "abf307d7-9aa9-4d2f-9943-e3a085568096" (UID: "abf307d7-9aa9-4d2f-9943-e3a085568096"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.860642 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "abf307d7-9aa9-4d2f-9943-e3a085568096" (UID: "abf307d7-9aa9-4d2f-9943-e3a085568096"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.860657 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/abf307d7-9aa9-4d2f-9943-e3a085568096-pod-info" (OuterVolumeSpecName: "pod-info") pod "abf307d7-9aa9-4d2f-9943-e3a085568096" (UID: "abf307d7-9aa9-4d2f-9943-e3a085568096"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.860898 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "abf307d7-9aa9-4d2f-9943-e3a085568096" (UID: "abf307d7-9aa9-4d2f-9943-e3a085568096"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.886713 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-config-data" (OuterVolumeSpecName: "config-data") pod "abf307d7-9aa9-4d2f-9943-e3a085568096" (UID: "abf307d7-9aa9-4d2f-9943-e3a085568096"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.930437 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-server-conf" (OuterVolumeSpecName: "server-conf") pod "abf307d7-9aa9-4d2f-9943-e3a085568096" (UID: "abf307d7-9aa9-4d2f-9943-e3a085568096"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.956696 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.957115 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.957129 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44vbj\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-kube-api-access-44vbj\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.957165 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.957180 4911 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-plugins-conf\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.957192 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.957203 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.957214 4911 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/abf307d7-9aa9-4d2f-9943-e3a085568096-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:25 crc 
kubenswrapper[4911]: I0606 09:35:25.957226 4911 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/abf307d7-9aa9-4d2f-9943-e3a085568096-server-conf\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.957237 4911 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/abf307d7-9aa9-4d2f-9943-e3a085568096-pod-info\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:25 crc kubenswrapper[4911]: I0606 09:35:25.996070 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.000864 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "abf307d7-9aa9-4d2f-9943-e3a085568096" (UID: "abf307d7-9aa9-4d2f-9943-e3a085568096"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.092533 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.092779 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/abf307d7-9aa9-4d2f-9943-e3a085568096-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.610572 4911 generic.go:334] "Generic (PLEG): container finished" podID="6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" containerID="c1b603770fd0771eb755ba8401e1fc4a29c04489ed1107eae7ebc65146557e23" exitCode=0 Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.610642 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4","Type":"ContainerDied","Data":"c1b603770fd0771eb755ba8401e1fc4a29c04489ed1107eae7ebc65146557e23"} Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.613385 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"abf307d7-9aa9-4d2f-9943-e3a085568096","Type":"ContainerDied","Data":"58588c900b6cd6229f07a204fe03e3a260511e1e4844bce13cd9c4eb378f8f04"} Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.613410 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.613418 4911 scope.go:117] "RemoveContainer" containerID="db8390c7890d5f767239b22d5379e46e15bf8a2ac339e7d9caddb2d672658514" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.639142 4911 scope.go:117] "RemoveContainer" containerID="6f0518a6b3cc2f944aa4e2c7b254c3bf8b64829224f9a2eb7ac9e7286d602d50" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.655961 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.666410 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.686580 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jun 06 09:35:26 crc kubenswrapper[4911]: E0606 09:35:26.687082 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4ff90d-165d-4c1f-827c-905bb5fd7cac" containerName="container-00" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.687127 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4ff90d-165d-4c1f-827c-905bb5fd7cac" containerName="container-00" Jun 06 09:35:26 crc kubenswrapper[4911]: E0606 09:35:26.687143 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abf307d7-9aa9-4d2f-9943-e3a085568096" containerName="rabbitmq" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.687151 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="abf307d7-9aa9-4d2f-9943-e3a085568096" containerName="rabbitmq" Jun 06 09:35:26 crc kubenswrapper[4911]: E0606 09:35:26.687162 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abf307d7-9aa9-4d2f-9943-e3a085568096" containerName="setup-container" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.687169 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="abf307d7-9aa9-4d2f-9943-e3a085568096" containerName="setup-container" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.687419 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="abf307d7-9aa9-4d2f-9943-e3a085568096" containerName="rabbitmq" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.687450 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf4ff90d-165d-4c1f-827c-905bb5fd7cac" containerName="container-00" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.688871 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.691792 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.692118 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.692321 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.692437 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.696760 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.697066 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.697310 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-clfpc" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.705689 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.806947 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.806990 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/733f579d-aebd-484e-a85c-6e25204f363e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.807017 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.807082 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/733f579d-aebd-484e-a85c-6e25204f363e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.807149 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/733f579d-aebd-484e-a85c-6e25204f363e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.807192 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.807360 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84bsk\" (UniqueName: \"kubernetes.io/projected/733f579d-aebd-484e-a85c-6e25204f363e-kube-api-access-84bsk\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.807481 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.807545 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/733f579d-aebd-484e-a85c-6e25204f363e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.807609 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.807668 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/733f579d-aebd-484e-a85c-6e25204f363e-config-data\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.826880 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.911864 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v99v4\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-kube-api-access-v99v4\") pod \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.912224 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.912248 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-pod-info\") pod \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.912293 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-plugins-conf\") pod \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.912350 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-tls\") pod \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.912373 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-config-data\") pod \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.912395 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-confd\") pod \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.912510 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-erlang-cookie\") pod \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.912645 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-erlang-cookie-secret\") pod \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.912664 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-plugins\") pod \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\" (UID: 
\"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.912739 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-server-conf\") pod \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\" (UID: \"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4\") " Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.912978 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/733f579d-aebd-484e-a85c-6e25204f363e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.913012 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/733f579d-aebd-484e-a85c-6e25204f363e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.913047 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.913119 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84bsk\" (UniqueName: \"kubernetes.io/projected/733f579d-aebd-484e-a85c-6e25204f363e-kube-api-access-84bsk\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.913146 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.913166 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/733f579d-aebd-484e-a85c-6e25204f363e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.913189 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.913208 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/733f579d-aebd-484e-a85c-6e25204f363e-config-data\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.913257 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod 
\"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.913281 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/733f579d-aebd-484e-a85c-6e25204f363e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.913301 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.913855 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.935057 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/733f579d-aebd-484e-a85c-6e25204f363e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.935878 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-kube-api-access-v99v4" (OuterVolumeSpecName: "kube-api-access-v99v4") pod "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" (UID: "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4"). InnerVolumeSpecName "kube-api-access-v99v4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.936459 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/733f579d-aebd-484e-a85c-6e25204f363e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.937529 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.948252 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" (UID: "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.950671 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.951386 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/733f579d-aebd-484e-a85c-6e25204f363e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.952030 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/733f579d-aebd-484e-a85c-6e25204f363e-config-data\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.953854 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" (UID: "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.958576 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.959072 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" (UID: "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.974412 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-pod-info" (OuterVolumeSpecName: "pod-info") pod "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" (UID: "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jun 06 09:35:26 crc kubenswrapper[4911]: I0606 09:35:26.975658 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/733f579d-aebd-484e-a85c-6e25204f363e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.007028 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/733f579d-aebd-484e-a85c-6e25204f363e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.024372 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" (UID: "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.027395 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.027443 4911 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.027459 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.027471 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v99v4\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-kube-api-access-v99v4\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.027481 4911 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-pod-info\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.027492 4911 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-plugins-conf\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.029726 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" (UID: "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4"). InnerVolumeSpecName "local-storage03-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.037167 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84bsk\" (UniqueName: \"kubernetes.io/projected/733f579d-aebd-484e-a85c-6e25204f363e-kube-api-access-84bsk\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.037545 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" (UID: "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.114603 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-server-conf" (OuterVolumeSpecName: "server-conf") pod "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" (UID: "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.116951 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"733f579d-aebd-484e-a85c-6e25204f363e\") " pod="openstack/rabbitmq-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.129268 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.129320 4911 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-server-conf\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.129353 4911 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.140348 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-config-data" (OuterVolumeSpecName: "config-data") pod "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" (UID: "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.151835 4911 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.228453 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" (UID: "6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.231011 4911 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.231068 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.231084 4911 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.422946 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.632124 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4","Type":"ContainerDied","Data":"1239103cc83fefa92a7944200a759a97a358828d5f69d6891e4b31aeede6a5c6"} Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.632184 4911 scope.go:117] "RemoveContainer" containerID="c1b603770fd0771eb755ba8401e1fc4a29c04489ed1107eae7ebc65146557e23" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.632306 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.675957 4911 scope.go:117] "RemoveContainer" containerID="cb54fd730abcec5f423a30e6720800012bb616571840ae9eba50348833ebe2a7" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.755170 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.795477 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.809407 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jun 06 09:35:27 crc kubenswrapper[4911]: E0606 09:35:27.809990 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" containerName="rabbitmq" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.810007 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" containerName="rabbitmq" Jun 06 09:35:27 crc kubenswrapper[4911]: E0606 09:35:27.810032 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" containerName="setup-container" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.810040 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" containerName="setup-container" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.810316 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" containerName="rabbitmq" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.811636 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.814658 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.814686 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.814980 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.815161 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.815434 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.815632 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.815783 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-f7f6b" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.834664 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.900402 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jun 06 09:35:27 crc kubenswrapper[4911]: W0606 09:35:27.904768 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod733f579d_aebd_484e_a85c_6e25204f363e.slice/crio-c3e598b43b7104ce04967f3c02a78cd2b3e7cee4bc01b7f96c5d290ae0b6f2e5 WatchSource:0}: Error finding container c3e598b43b7104ce04967f3c02a78cd2b3e7cee4bc01b7f96c5d290ae0b6f2e5: Status 404 returned error can't find the container with id c3e598b43b7104ce04967f3c02a78cd2b3e7cee4bc01b7f96c5d290ae0b6f2e5 Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.948670 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwz5b\" (UniqueName: \"kubernetes.io/projected/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-kube-api-access-vwz5b\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.951889 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.952421 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.952449 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.952471 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.952512 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.952537 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.952631 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.952648 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.952675 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.952692 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.963717 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4" path="/var/lib/kubelet/pods/6a6b5cde-8e4b-4433-a3b3-4f79eb0754b4/volumes" Jun 06 09:35:27 crc kubenswrapper[4911]: I0606 09:35:27.965063 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abf307d7-9aa9-4d2f-9943-e3a085568096" path="/var/lib/kubelet/pods/abf307d7-9aa9-4d2f-9943-e3a085568096/volumes" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.054792 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.054845 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.054885 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.054914 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.055005 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwz5b\" (UniqueName: \"kubernetes.io/projected/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-kube-api-access-vwz5b\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.055060 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.055148 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.055177 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.055203 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.055262 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.055306 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.055956 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.056048 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.056442 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.057208 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.057561 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.058403 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.060922 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.060954 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " 
pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.060952 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.061287 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.074031 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwz5b\" (UniqueName: \"kubernetes.io/projected/1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8-kube-api-access-vwz5b\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.097535 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8\") " pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.136818 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.575835 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jun 06 09:35:28 crc kubenswrapper[4911]: W0606 09:35:28.588956 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cd9c143_7cf0_4055_81a5_2b2ff33ae6b8.slice/crio-62e0fd249caeef9358e9bbf34e20d9ba8dba051e321dbab50c052a023518a357 WatchSource:0}: Error finding container 62e0fd249caeef9358e9bbf34e20d9ba8dba051e321dbab50c052a023518a357: Status 404 returned error can't find the container with id 62e0fd249caeef9358e9bbf34e20d9ba8dba051e321dbab50c052a023518a357 Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.645298 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8","Type":"ContainerStarted","Data":"62e0fd249caeef9358e9bbf34e20d9ba8dba051e321dbab50c052a023518a357"} Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.647110 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"733f579d-aebd-484e-a85c-6e25204f363e","Type":"ContainerStarted","Data":"e2da0e790dddd8ee9f02e1930be85252e297f39c8aa659e62d4674e949d81737"} Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.647151 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"733f579d-aebd-484e-a85c-6e25204f363e","Type":"ContainerStarted","Data":"c3e598b43b7104ce04967f3c02a78cd2b3e7cee4bc01b7f96c5d290ae0b6f2e5"} Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.910311 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86f5885cbf-lrqqt"] Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.912872 4911 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.915371 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Jun 06 09:35:28 crc kubenswrapper[4911]: I0606 09:35:28.926429 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86f5885cbf-lrqqt"] Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.073854 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-config\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.073922 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-sb\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.073965 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-openstack-edpm-ipam\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.074003 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz4j7\" (UniqueName: \"kubernetes.io/projected/3acb9417-b111-4cd8-80d1-f562f3f872f7-kube-api-access-mz4j7\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.074081 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-nb\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.074177 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-swift-storage-0\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.074212 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-svc\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.175971 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-config\") pod 
\"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.176373 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-sb\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.176417 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-openstack-edpm-ipam\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.176456 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz4j7\" (UniqueName: \"kubernetes.io/projected/3acb9417-b111-4cd8-80d1-f562f3f872f7-kube-api-access-mz4j7\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.176544 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-nb\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.176641 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-swift-storage-0\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.176673 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-svc\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.177266 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-config\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.177517 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-sb\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.177614 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-openstack-edpm-ipam\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: 
\"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.177781 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-swift-storage-0\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.177853 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-nb\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.178427 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-svc\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.200247 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz4j7\" (UniqueName: \"kubernetes.io/projected/3acb9417-b111-4cd8-80d1-f562f3f872f7-kube-api-access-mz4j7\") pod \"dnsmasq-dns-86f5885cbf-lrqqt\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.233369 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.664203 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8","Type":"ContainerStarted","Data":"8e77fbacb1ad057bbca352021fa961f77eb9a60951b608caaf880641b97193b5"} Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.742467 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86f5885cbf-lrqqt"] Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.797516 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dk2tj"] Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.806917 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.808948 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dk2tj"] Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.907905 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-catalog-content\") pod \"redhat-marketplace-dk2tj\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.907964 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md5tt\" (UniqueName: \"kubernetes.io/projected/15a9c549-f12d-4768-af2c-5a8c80fc95e5-kube-api-access-md5tt\") pod \"redhat-marketplace-dk2tj\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:29 crc kubenswrapper[4911]: I0606 09:35:29.908176 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-utilities\") pod \"redhat-marketplace-dk2tj\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:30 crc kubenswrapper[4911]: I0606 09:35:30.010226 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-utilities\") pod \"redhat-marketplace-dk2tj\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:30 crc kubenswrapper[4911]: I0606 09:35:30.010305 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-catalog-content\") pod \"redhat-marketplace-dk2tj\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:30 crc kubenswrapper[4911]: I0606 09:35:30.010349 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md5tt\" (UniqueName: \"kubernetes.io/projected/15a9c549-f12d-4768-af2c-5a8c80fc95e5-kube-api-access-md5tt\") pod \"redhat-marketplace-dk2tj\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:30 crc kubenswrapper[4911]: I0606 09:35:30.010878 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-utilities\") pod \"redhat-marketplace-dk2tj\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:30 crc kubenswrapper[4911]: I0606 09:35:30.011044 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-catalog-content\") pod \"redhat-marketplace-dk2tj\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:30 crc kubenswrapper[4911]: I0606 09:35:30.032924 4911 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-md5tt\" (UniqueName: \"kubernetes.io/projected/15a9c549-f12d-4768-af2c-5a8c80fc95e5-kube-api-access-md5tt\") pod \"redhat-marketplace-dk2tj\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:30 crc kubenswrapper[4911]: I0606 09:35:30.302119 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:30 crc kubenswrapper[4911]: I0606 09:35:30.679324 4911 generic.go:334] "Generic (PLEG): container finished" podID="3acb9417-b111-4cd8-80d1-f562f3f872f7" containerID="4a7df66b96fe15de6378bd0aeb83a0c01cb6a10ab333d1117cba687ec4fb942c" exitCode=0 Jun 06 09:35:30 crc kubenswrapper[4911]: I0606 09:35:30.679420 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" event={"ID":"3acb9417-b111-4cd8-80d1-f562f3f872f7","Type":"ContainerDied","Data":"4a7df66b96fe15de6378bd0aeb83a0c01cb6a10ab333d1117cba687ec4fb942c"} Jun 06 09:35:30 crc kubenswrapper[4911]: I0606 09:35:30.679501 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" event={"ID":"3acb9417-b111-4cd8-80d1-f562f3f872f7","Type":"ContainerStarted","Data":"f94c2a85a02c488e59362f5657624e1ba19de59077eff2530387fc5e8bb0269f"} Jun 06 09:35:30 crc kubenswrapper[4911]: I0606 09:35:30.764231 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dk2tj"] Jun 06 09:35:30 crc kubenswrapper[4911]: W0606 09:35:30.789332 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15a9c549_f12d_4768_af2c_5a8c80fc95e5.slice/crio-f67bd9302ba90e8242a48321a68802087977414f8294d39f3f5d5aa62ddd0212 WatchSource:0}: Error finding container f67bd9302ba90e8242a48321a68802087977414f8294d39f3f5d5aa62ddd0212: Status 404 returned error can't find the container with id f67bd9302ba90e8242a48321a68802087977414f8294d39f3f5d5aa62ddd0212 Jun 06 09:35:31 crc kubenswrapper[4911]: I0606 09:35:31.689482 4911 generic.go:334] "Generic (PLEG): container finished" podID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" containerID="9413fe08be22aeb30f1e6ff6c0a539f9380ca5c54541f2a26bb8f8dc7cbe2025" exitCode=0 Jun 06 09:35:31 crc kubenswrapper[4911]: I0606 09:35:31.689549 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dk2tj" event={"ID":"15a9c549-f12d-4768-af2c-5a8c80fc95e5","Type":"ContainerDied","Data":"9413fe08be22aeb30f1e6ff6c0a539f9380ca5c54541f2a26bb8f8dc7cbe2025"} Jun 06 09:35:31 crc kubenswrapper[4911]: I0606 09:35:31.689873 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dk2tj" event={"ID":"15a9c549-f12d-4768-af2c-5a8c80fc95e5","Type":"ContainerStarted","Data":"f67bd9302ba90e8242a48321a68802087977414f8294d39f3f5d5aa62ddd0212"} Jun 06 09:35:31 crc kubenswrapper[4911]: I0606 09:35:31.694317 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" event={"ID":"3acb9417-b111-4cd8-80d1-f562f3f872f7","Type":"ContainerStarted","Data":"d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3"} Jun 06 09:35:31 crc kubenswrapper[4911]: I0606 09:35:31.694609 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:31 crc kubenswrapper[4911]: I0606 09:35:31.739676 4911 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" podStartSLOduration=3.7396566030000002 podStartE2EDuration="3.739656603s" podCreationTimestamp="2025-06-06 09:35:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:35:31.728049824 +0000 UTC m=+1343.003475387" watchObservedRunningTime="2025-06-06 09:35:31.739656603 +0000 UTC m=+1343.015082146" Jun 06 09:35:32 crc kubenswrapper[4911]: I0606 09:35:32.711415 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dk2tj" event={"ID":"15a9c549-f12d-4768-af2c-5a8c80fc95e5","Type":"ContainerStarted","Data":"7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705"} Jun 06 09:35:33 crc kubenswrapper[4911]: I0606 09:35:33.722924 4911 generic.go:334] "Generic (PLEG): container finished" podID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" containerID="7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705" exitCode=0 Jun 06 09:35:33 crc kubenswrapper[4911]: I0606 09:35:33.722992 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dk2tj" event={"ID":"15a9c549-f12d-4768-af2c-5a8c80fc95e5","Type":"ContainerDied","Data":"7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705"} Jun 06 09:35:34 crc kubenswrapper[4911]: I0606 09:35:34.738645 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dk2tj" event={"ID":"15a9c549-f12d-4768-af2c-5a8c80fc95e5","Type":"ContainerStarted","Data":"87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c"} Jun 06 09:35:34 crc kubenswrapper[4911]: I0606 09:35:34.770877 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dk2tj" podStartSLOduration=3.206447062 podStartE2EDuration="5.770859749s" podCreationTimestamp="2025-06-06 09:35:29 +0000 UTC" firstStartedPulling="2025-06-06 09:35:31.69221826 +0000 UTC m=+1342.967643803" lastFinishedPulling="2025-06-06 09:35:34.256630947 +0000 UTC m=+1345.532056490" observedRunningTime="2025-06-06 09:35:34.761660732 +0000 UTC m=+1346.037086295" watchObservedRunningTime="2025-06-06 09:35:34.770859749 +0000 UTC m=+1346.046285292" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.235002 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.297431 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-748b4c9f7f-q2697"] Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.297848 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" podUID="93fa7ffc-cc42-4e89-8f38-a74170fcbcba" containerName="dnsmasq-dns" containerID="cri-o://5c6d5d3181ca899b730c2c2c04e206c965e5875b93e3477e3a4904a0d160cc83" gracePeriod=10 Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.427906 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" podUID="93fa7ffc-cc42-4e89-8f38-a74170fcbcba" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.205:5353: connect: connection refused" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.463843 4911 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-68db4d6659-xfwpc"] Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.471740 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.474173 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68db4d6659-xfwpc"] Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.626176 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-config\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.626241 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-ovsdbserver-nb\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.626284 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-dns-swift-storage-0\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.626363 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-ovsdbserver-sb\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.626403 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l59sf\" (UniqueName: \"kubernetes.io/projected/32f99d86-aac4-4887-a65e-c05e81a506b0-kube-api-access-l59sf\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.626437 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-dns-svc\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.626641 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-openstack-edpm-ipam\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.731488 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-config\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: 
\"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.732109 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-ovsdbserver-nb\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.732245 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-dns-swift-storage-0\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.732345 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-config\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.732498 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-ovsdbserver-sb\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.732677 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l59sf\" (UniqueName: \"kubernetes.io/projected/32f99d86-aac4-4887-a65e-c05e81a506b0-kube-api-access-l59sf\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.732796 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-dns-svc\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.732956 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-openstack-edpm-ipam\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.733088 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-dns-swift-storage-0\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.733822 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-ovsdbserver-sb\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " 
pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.734691 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-ovsdbserver-nb\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.734839 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-dns-svc\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.735040 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/32f99d86-aac4-4887-a65e-c05e81a506b0-openstack-edpm-ipam\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.770444 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l59sf\" (UniqueName: \"kubernetes.io/projected/32f99d86-aac4-4887-a65e-c05e81a506b0-kube-api-access-l59sf\") pod \"dnsmasq-dns-68db4d6659-xfwpc\" (UID: \"32f99d86-aac4-4887-a65e-c05e81a506b0\") " pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.803412 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.820404 4911 generic.go:334] "Generic (PLEG): container finished" podID="93fa7ffc-cc42-4e89-8f38-a74170fcbcba" containerID="5c6d5d3181ca899b730c2c2c04e206c965e5875b93e3477e3a4904a0d160cc83" exitCode=0 Jun 06 09:35:39 crc kubenswrapper[4911]: I0606 09:35:39.820460 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" event={"ID":"93fa7ffc-cc42-4e89-8f38-a74170fcbcba","Type":"ContainerDied","Data":"5c6d5d3181ca899b730c2c2c04e206c965e5875b93e3477e3a4904a0d160cc83"} Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.086329 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.242278 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-nb\") pod \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.242326 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndh7l\" (UniqueName: \"kubernetes.io/projected/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-kube-api-access-ndh7l\") pod \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.242466 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-svc\") pod \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.242521 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-config\") pod \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.242638 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-sb\") pod \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.242661 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-swift-storage-0\") pod \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\" (UID: \"93fa7ffc-cc42-4e89-8f38-a74170fcbcba\") " Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.249025 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-kube-api-access-ndh7l" (OuterVolumeSpecName: "kube-api-access-ndh7l") pod "93fa7ffc-cc42-4e89-8f38-a74170fcbcba" (UID: "93fa7ffc-cc42-4e89-8f38-a74170fcbcba"). InnerVolumeSpecName "kube-api-access-ndh7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.302396 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.303234 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.303974 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-config" (OuterVolumeSpecName: "config") pod "93fa7ffc-cc42-4e89-8f38-a74170fcbcba" (UID: "93fa7ffc-cc42-4e89-8f38-a74170fcbcba"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.305262 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "93fa7ffc-cc42-4e89-8f38-a74170fcbcba" (UID: "93fa7ffc-cc42-4e89-8f38-a74170fcbcba"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.306435 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68db4d6659-xfwpc"] Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.310750 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "93fa7ffc-cc42-4e89-8f38-a74170fcbcba" (UID: "93fa7ffc-cc42-4e89-8f38-a74170fcbcba"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.314828 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "93fa7ffc-cc42-4e89-8f38-a74170fcbcba" (UID: "93fa7ffc-cc42-4e89-8f38-a74170fcbcba"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:40 crc kubenswrapper[4911]: W0606 09:35:40.315998 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32f99d86_aac4_4887_a65e_c05e81a506b0.slice/crio-aa848e44ea37da70d5cc7f5168005a9e541f6f99bbd56e2953db70e973505ab8 WatchSource:0}: Error finding container aa848e44ea37da70d5cc7f5168005a9e541f6f99bbd56e2953db70e973505ab8: Status 404 returned error can't find the container with id aa848e44ea37da70d5cc7f5168005a9e541f6f99bbd56e2953db70e973505ab8 Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.322817 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "93fa7ffc-cc42-4e89-8f38-a74170fcbcba" (UID: "93fa7ffc-cc42-4e89-8f38-a74170fcbcba"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.344986 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.345024 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndh7l\" (UniqueName: \"kubernetes.io/projected/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-kube-api-access-ndh7l\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.345044 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.345058 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.345069 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.345080 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93fa7ffc-cc42-4e89-8f38-a74170fcbcba-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.354121 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.831217 4911 generic.go:334] "Generic (PLEG): container finished" podID="32f99d86-aac4-4887-a65e-c05e81a506b0" containerID="e9bfbcda482b605ed95d636304495780663b427ff15cccd3bae8a22775bb6195" exitCode=0 Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.831305 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" event={"ID":"32f99d86-aac4-4887-a65e-c05e81a506b0","Type":"ContainerDied","Data":"e9bfbcda482b605ed95d636304495780663b427ff15cccd3bae8a22775bb6195"} Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.831631 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" event={"ID":"32f99d86-aac4-4887-a65e-c05e81a506b0","Type":"ContainerStarted","Data":"aa848e44ea37da70d5cc7f5168005a9e541f6f99bbd56e2953db70e973505ab8"} Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.834352 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" event={"ID":"93fa7ffc-cc42-4e89-8f38-a74170fcbcba","Type":"ContainerDied","Data":"f6d47e4ffadbe3e4c515cae6e43970dba0802a8078738b95a1f911ebe4956f46"} Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.834406 4911 scope.go:117] "RemoveContainer" containerID="5c6d5d3181ca899b730c2c2c04e206c965e5875b93e3477e3a4904a0d160cc83" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.834371 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-748b4c9f7f-q2697" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.891277 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:40 crc kubenswrapper[4911]: I0606 09:35:40.951829 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dk2tj"] Jun 06 09:35:41 crc kubenswrapper[4911]: I0606 09:35:41.032835 4911 scope.go:117] "RemoveContainer" containerID="ab7be46a1af4dee0c92f0b635efd09e120d80e99d797de757f1391ab31c3dab2" Jun 06 09:35:41 crc kubenswrapper[4911]: I0606 09:35:41.065900 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-748b4c9f7f-q2697"] Jun 06 09:35:41 crc kubenswrapper[4911]: I0606 09:35:41.078392 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-748b4c9f7f-q2697"] Jun 06 09:35:41 crc kubenswrapper[4911]: I0606 09:35:41.849136 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" event={"ID":"32f99d86-aac4-4887-a65e-c05e81a506b0","Type":"ContainerStarted","Data":"3bfb29ffaa97aecf947b68a0eeb72213df22b2776acce1f4e977c05aedee487b"} Jun 06 09:35:41 crc kubenswrapper[4911]: I0606 09:35:41.849994 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:41 crc kubenswrapper[4911]: I0606 09:35:41.880710 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" podStartSLOduration=2.880673581 podStartE2EDuration="2.880673581s" podCreationTimestamp="2025-06-06 09:35:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:35:41.873030184 +0000 UTC m=+1353.148455747" watchObservedRunningTime="2025-06-06 09:35:41.880673581 +0000 UTC m=+1353.156099124" Jun 06 09:35:41 crc kubenswrapper[4911]: I0606 09:35:41.963383 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93fa7ffc-cc42-4e89-8f38-a74170fcbcba" path="/var/lib/kubelet/pods/93fa7ffc-cc42-4e89-8f38-a74170fcbcba/volumes" Jun 06 09:35:42 crc kubenswrapper[4911]: I0606 09:35:42.858836 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dk2tj" podUID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" containerName="registry-server" containerID="cri-o://87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c" gracePeriod=2 Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.677303 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.817527 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-catalog-content\") pod \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.817583 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md5tt\" (UniqueName: \"kubernetes.io/projected/15a9c549-f12d-4768-af2c-5a8c80fc95e5-kube-api-access-md5tt\") pod \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.817680 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-utilities\") pod \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\" (UID: \"15a9c549-f12d-4768-af2c-5a8c80fc95e5\") " Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.819282 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-utilities" (OuterVolumeSpecName: "utilities") pod "15a9c549-f12d-4768-af2c-5a8c80fc95e5" (UID: "15a9c549-f12d-4768-af2c-5a8c80fc95e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.827521 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15a9c549-f12d-4768-af2c-5a8c80fc95e5-kube-api-access-md5tt" (OuterVolumeSpecName: "kube-api-access-md5tt") pod "15a9c549-f12d-4768-af2c-5a8c80fc95e5" (UID: "15a9c549-f12d-4768-af2c-5a8c80fc95e5"). InnerVolumeSpecName "kube-api-access-md5tt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.832664 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "15a9c549-f12d-4768-af2c-5a8c80fc95e5" (UID: "15a9c549-f12d-4768-af2c-5a8c80fc95e5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.870618 4911 generic.go:334] "Generic (PLEG): container finished" podID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" containerID="87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c" exitCode=0 Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.870682 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dk2tj" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.870683 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dk2tj" event={"ID":"15a9c549-f12d-4768-af2c-5a8c80fc95e5","Type":"ContainerDied","Data":"87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c"} Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.870765 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dk2tj" event={"ID":"15a9c549-f12d-4768-af2c-5a8c80fc95e5","Type":"ContainerDied","Data":"f67bd9302ba90e8242a48321a68802087977414f8294d39f3f5d5aa62ddd0212"} Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.870789 4911 scope.go:117] "RemoveContainer" containerID="87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.903758 4911 scope.go:117] "RemoveContainer" containerID="7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.911124 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dk2tj"] Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.920300 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.920337 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15a9c549-f12d-4768-af2c-5a8c80fc95e5-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.920349 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md5tt\" (UniqueName: \"kubernetes.io/projected/15a9c549-f12d-4768-af2c-5a8c80fc95e5-kube-api-access-md5tt\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.922398 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dk2tj"] Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.938079 4911 scope.go:117] "RemoveContainer" containerID="9413fe08be22aeb30f1e6ff6c0a539f9380ca5c54541f2a26bb8f8dc7cbe2025" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.958524 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" path="/var/lib/kubelet/pods/15a9c549-f12d-4768-af2c-5a8c80fc95e5/volumes" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.983361 4911 scope.go:117] "RemoveContainer" containerID="87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c" Jun 06 09:35:43 crc kubenswrapper[4911]: E0606 09:35:43.983946 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c\": container with ID starting with 87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c not found: ID does not exist" containerID="87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.983987 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c"} err="failed to get 
container status \"87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c\": rpc error: code = NotFound desc = could not find container \"87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c\": container with ID starting with 87cde7ab76d6df3afcddd50f0329e4a5927731234aef8d74a2f65773a8b5b94c not found: ID does not exist" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.984009 4911 scope.go:117] "RemoveContainer" containerID="7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705" Jun 06 09:35:43 crc kubenswrapper[4911]: E0606 09:35:43.984514 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705\": container with ID starting with 7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705 not found: ID does not exist" containerID="7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.984543 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705"} err="failed to get container status \"7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705\": rpc error: code = NotFound desc = could not find container \"7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705\": container with ID starting with 7ee8379c0dbcfc1add86ac2c55bd6590306101d95862840bc4ece418e57d2705 not found: ID does not exist" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.984580 4911 scope.go:117] "RemoveContainer" containerID="9413fe08be22aeb30f1e6ff6c0a539f9380ca5c54541f2a26bb8f8dc7cbe2025" Jun 06 09:35:43 crc kubenswrapper[4911]: E0606 09:35:43.984853 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9413fe08be22aeb30f1e6ff6c0a539f9380ca5c54541f2a26bb8f8dc7cbe2025\": container with ID starting with 9413fe08be22aeb30f1e6ff6c0a539f9380ca5c54541f2a26bb8f8dc7cbe2025 not found: ID does not exist" containerID="9413fe08be22aeb30f1e6ff6c0a539f9380ca5c54541f2a26bb8f8dc7cbe2025" Jun 06 09:35:43 crc kubenswrapper[4911]: I0606 09:35:43.984898 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9413fe08be22aeb30f1e6ff6c0a539f9380ca5c54541f2a26bb8f8dc7cbe2025"} err="failed to get container status \"9413fe08be22aeb30f1e6ff6c0a539f9380ca5c54541f2a26bb8f8dc7cbe2025\": rpc error: code = NotFound desc = could not find container \"9413fe08be22aeb30f1e6ff6c0a539f9380ca5c54541f2a26bb8f8dc7cbe2025\": container with ID starting with 9413fe08be22aeb30f1e6ff6c0a539f9380ca5c54541f2a26bb8f8dc7cbe2025 not found: ID does not exist" Jun 06 09:35:49 crc kubenswrapper[4911]: I0606 09:35:49.805384 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68db4d6659-xfwpc" Jun 06 09:35:49 crc kubenswrapper[4911]: I0606 09:35:49.866560 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86f5885cbf-lrqqt"] Jun 06 09:35:49 crc kubenswrapper[4911]: I0606 09:35:49.866813 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" podUID="3acb9417-b111-4cd8-80d1-f562f3f872f7" containerName="dnsmasq-dns" containerID="cri-o://d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3" gracePeriod=10 Jun 06 09:35:50 crc 
kubenswrapper[4911]: I0606 09:35:50.602169 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.776493 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-nb\") pod \"3acb9417-b111-4cd8-80d1-f562f3f872f7\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.776662 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-config\") pod \"3acb9417-b111-4cd8-80d1-f562f3f872f7\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.776711 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mz4j7\" (UniqueName: \"kubernetes.io/projected/3acb9417-b111-4cd8-80d1-f562f3f872f7-kube-api-access-mz4j7\") pod \"3acb9417-b111-4cd8-80d1-f562f3f872f7\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.776775 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-openstack-edpm-ipam\") pod \"3acb9417-b111-4cd8-80d1-f562f3f872f7\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.776908 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-svc\") pod \"3acb9417-b111-4cd8-80d1-f562f3f872f7\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.776951 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-swift-storage-0\") pod \"3acb9417-b111-4cd8-80d1-f562f3f872f7\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.777191 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-sb\") pod \"3acb9417-b111-4cd8-80d1-f562f3f872f7\" (UID: \"3acb9417-b111-4cd8-80d1-f562f3f872f7\") " Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.786362 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3acb9417-b111-4cd8-80d1-f562f3f872f7-kube-api-access-mz4j7" (OuterVolumeSpecName: "kube-api-access-mz4j7") pod "3acb9417-b111-4cd8-80d1-f562f3f872f7" (UID: "3acb9417-b111-4cd8-80d1-f562f3f872f7"). InnerVolumeSpecName "kube-api-access-mz4j7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.847421 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3acb9417-b111-4cd8-80d1-f562f3f872f7" (UID: "3acb9417-b111-4cd8-80d1-f562f3f872f7"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.859408 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3acb9417-b111-4cd8-80d1-f562f3f872f7" (UID: "3acb9417-b111-4cd8-80d1-f562f3f872f7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.866708 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-config" (OuterVolumeSpecName: "config") pod "3acb9417-b111-4cd8-80d1-f562f3f872f7" (UID: "3acb9417-b111-4cd8-80d1-f562f3f872f7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.867187 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3acb9417-b111-4cd8-80d1-f562f3f872f7" (UID: "3acb9417-b111-4cd8-80d1-f562f3f872f7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.867889 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3acb9417-b111-4cd8-80d1-f562f3f872f7" (UID: "3acb9417-b111-4cd8-80d1-f562f3f872f7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.868721 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "3acb9417-b111-4cd8-80d1-f562f3f872f7" (UID: "3acb9417-b111-4cd8-80d1-f562f3f872f7"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.881428 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.881466 4911 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.881480 4911 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-config\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.881492 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mz4j7\" (UniqueName: \"kubernetes.io/projected/3acb9417-b111-4cd8-80d1-f562f3f872f7-kube-api-access-mz4j7\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.881506 4911 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.881516 4911 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-svc\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.881530 4911 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3acb9417-b111-4cd8-80d1-f562f3f872f7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.949654 4911 generic.go:334] "Generic (PLEG): container finished" podID="3acb9417-b111-4cd8-80d1-f562f3f872f7" containerID="d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3" exitCode=0 Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.949697 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" event={"ID":"3acb9417-b111-4cd8-80d1-f562f3f872f7","Type":"ContainerDied","Data":"d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3"} Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.949721 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" event={"ID":"3acb9417-b111-4cd8-80d1-f562f3f872f7","Type":"ContainerDied","Data":"f94c2a85a02c488e59362f5657624e1ba19de59077eff2530387fc5e8bb0269f"} Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.949738 4911 scope.go:117] "RemoveContainer" containerID="d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.949741 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86f5885cbf-lrqqt" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.981367 4911 scope.go:117] "RemoveContainer" containerID="4a7df66b96fe15de6378bd0aeb83a0c01cb6a10ab333d1117cba687ec4fb942c" Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.987229 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86f5885cbf-lrqqt"] Jun 06 09:35:50 crc kubenswrapper[4911]: I0606 09:35:50.999728 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86f5885cbf-lrqqt"] Jun 06 09:35:51 crc kubenswrapper[4911]: I0606 09:35:51.037423 4911 scope.go:117] "RemoveContainer" containerID="d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3" Jun 06 09:35:51 crc kubenswrapper[4911]: E0606 09:35:51.039637 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3\": container with ID starting with d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3 not found: ID does not exist" containerID="d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3" Jun 06 09:35:51 crc kubenswrapper[4911]: I0606 09:35:51.039809 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3"} err="failed to get container status \"d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3\": rpc error: code = NotFound desc = could not find container \"d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3\": container with ID starting with d9b1acf1be798793b72838023a6ed7744a27770dfa213a9b08eaa43b481331e3 not found: ID does not exist" Jun 06 09:35:51 crc kubenswrapper[4911]: I0606 09:35:51.039844 4911 scope.go:117] "RemoveContainer" containerID="4a7df66b96fe15de6378bd0aeb83a0c01cb6a10ab333d1117cba687ec4fb942c" Jun 06 09:35:51 crc kubenswrapper[4911]: E0606 09:35:51.040405 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a7df66b96fe15de6378bd0aeb83a0c01cb6a10ab333d1117cba687ec4fb942c\": container with ID starting with 4a7df66b96fe15de6378bd0aeb83a0c01cb6a10ab333d1117cba687ec4fb942c not found: ID does not exist" containerID="4a7df66b96fe15de6378bd0aeb83a0c01cb6a10ab333d1117cba687ec4fb942c" Jun 06 09:35:51 crc kubenswrapper[4911]: I0606 09:35:51.040477 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a7df66b96fe15de6378bd0aeb83a0c01cb6a10ab333d1117cba687ec4fb942c"} err="failed to get container status \"4a7df66b96fe15de6378bd0aeb83a0c01cb6a10ab333d1117cba687ec4fb942c\": rpc error: code = NotFound desc = could not find container \"4a7df66b96fe15de6378bd0aeb83a0c01cb6a10ab333d1117cba687ec4fb942c\": container with ID starting with 4a7df66b96fe15de6378bd0aeb83a0c01cb6a10ab333d1117cba687ec4fb942c not found: ID does not exist" Jun 06 09:35:51 crc kubenswrapper[4911]: I0606 09:35:51.970730 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3acb9417-b111-4cd8-80d1-f562f3f872f7" path="/var/lib/kubelet/pods/3acb9417-b111-4cd8-80d1-f562f3f872f7/volumes" Jun 06 09:35:59 crc kubenswrapper[4911]: I0606 09:35:59.030177 4911 generic.go:334] "Generic (PLEG): container finished" podID="733f579d-aebd-484e-a85c-6e25204f363e" containerID="e2da0e790dddd8ee9f02e1930be85252e297f39c8aa659e62d4674e949d81737" 
exitCode=0 Jun 06 09:35:59 crc kubenswrapper[4911]: I0606 09:35:59.030290 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"733f579d-aebd-484e-a85c-6e25204f363e","Type":"ContainerDied","Data":"e2da0e790dddd8ee9f02e1930be85252e297f39c8aa659e62d4674e949d81737"} Jun 06 09:36:00 crc kubenswrapper[4911]: I0606 09:36:00.044035 4911 generic.go:334] "Generic (PLEG): container finished" podID="1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8" containerID="8e77fbacb1ad057bbca352021fa961f77eb9a60951b608caaf880641b97193b5" exitCode=0 Jun 06 09:36:00 crc kubenswrapper[4911]: I0606 09:36:00.044139 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8","Type":"ContainerDied","Data":"8e77fbacb1ad057bbca352021fa961f77eb9a60951b608caaf880641b97193b5"} Jun 06 09:36:00 crc kubenswrapper[4911]: I0606 09:36:00.047853 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"733f579d-aebd-484e-a85c-6e25204f363e","Type":"ContainerStarted","Data":"0fb214f1c243ed7aea7a913b93f8aff1530904af273ffc1290c788e5c9c318ab"} Jun 06 09:36:00 crc kubenswrapper[4911]: I0606 09:36:00.048065 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jun 06 09:36:00 crc kubenswrapper[4911]: I0606 09:36:00.111721 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=34.111694822 podStartE2EDuration="34.111694822s" podCreationTimestamp="2025-06-06 09:35:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:36:00.108008207 +0000 UTC m=+1371.383433770" watchObservedRunningTime="2025-06-06 09:36:00.111694822 +0000 UTC m=+1371.387120365" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.061805 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8","Type":"ContainerStarted","Data":"59cdc9f31c5440df123b2804f77f39c222d209847713023eff9397bd4f188153"} Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.091589 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=34.091557389 podStartE2EDuration="34.091557389s" podCreationTimestamp="2025-06-06 09:35:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:36:01.088633744 +0000 UTC m=+1372.364059297" watchObservedRunningTime="2025-06-06 09:36:01.091557389 +0000 UTC m=+1372.366982943" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.723952 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-xz6sc"] Jun 06 09:36:01 crc kubenswrapper[4911]: E0606 09:36:01.724395 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3acb9417-b111-4cd8-80d1-f562f3f872f7" containerName="init" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.724588 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3acb9417-b111-4cd8-80d1-f562f3f872f7" containerName="init" Jun 06 09:36:01 crc kubenswrapper[4911]: E0606 09:36:01.724614 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" containerName="registry-server" Jun 06 09:36:01 crc kubenswrapper[4911]: 
I0606 09:36:01.724622 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" containerName="registry-server" Jun 06 09:36:01 crc kubenswrapper[4911]: E0606 09:36:01.724662 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93fa7ffc-cc42-4e89-8f38-a74170fcbcba" containerName="dnsmasq-dns" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.724669 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="93fa7ffc-cc42-4e89-8f38-a74170fcbcba" containerName="dnsmasq-dns" Jun 06 09:36:01 crc kubenswrapper[4911]: E0606 09:36:01.724680 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3acb9417-b111-4cd8-80d1-f562f3f872f7" containerName="dnsmasq-dns" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.724689 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3acb9417-b111-4cd8-80d1-f562f3f872f7" containerName="dnsmasq-dns" Jun 06 09:36:01 crc kubenswrapper[4911]: E0606 09:36:01.724703 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" containerName="extract-utilities" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.724710 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" containerName="extract-utilities" Jun 06 09:36:01 crc kubenswrapper[4911]: E0606 09:36:01.724726 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93fa7ffc-cc42-4e89-8f38-a74170fcbcba" containerName="init" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.724733 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="93fa7ffc-cc42-4e89-8f38-a74170fcbcba" containerName="init" Jun 06 09:36:01 crc kubenswrapper[4911]: E0606 09:36:01.724750 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" containerName="extract-content" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.724757 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" containerName="extract-content" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.724975 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a9c549-f12d-4768-af2c-5a8c80fc95e5" containerName="registry-server" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.724989 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="93fa7ffc-cc42-4e89-8f38-a74170fcbcba" containerName="dnsmasq-dns" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.725006 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3acb9417-b111-4cd8-80d1-f562f3f872f7" containerName="dnsmasq-dns" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.726187 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-xz6sc" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.821398 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxhs2\" (UniqueName: \"kubernetes.io/projected/603912cd-7698-4241-a9ae-927ae90862c6-kube-api-access-qxhs2\") pod \"crc-debug-xz6sc\" (UID: \"603912cd-7698-4241-a9ae-927ae90862c6\") " pod="openstack/crc-debug-xz6sc" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.821649 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/603912cd-7698-4241-a9ae-927ae90862c6-host\") pod \"crc-debug-xz6sc\" (UID: \"603912cd-7698-4241-a9ae-927ae90862c6\") " pod="openstack/crc-debug-xz6sc" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.924377 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxhs2\" (UniqueName: \"kubernetes.io/projected/603912cd-7698-4241-a9ae-927ae90862c6-kube-api-access-qxhs2\") pod \"crc-debug-xz6sc\" (UID: \"603912cd-7698-4241-a9ae-927ae90862c6\") " pod="openstack/crc-debug-xz6sc" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.924453 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/603912cd-7698-4241-a9ae-927ae90862c6-host\") pod \"crc-debug-xz6sc\" (UID: \"603912cd-7698-4241-a9ae-927ae90862c6\") " pod="openstack/crc-debug-xz6sc" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.924683 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/603912cd-7698-4241-a9ae-927ae90862c6-host\") pod \"crc-debug-xz6sc\" (UID: \"603912cd-7698-4241-a9ae-927ae90862c6\") " pod="openstack/crc-debug-xz6sc" Jun 06 09:36:01 crc kubenswrapper[4911]: I0606 09:36:01.952968 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxhs2\" (UniqueName: \"kubernetes.io/projected/603912cd-7698-4241-a9ae-927ae90862c6-kube-api-access-qxhs2\") pod \"crc-debug-xz6sc\" (UID: \"603912cd-7698-4241-a9ae-927ae90862c6\") " pod="openstack/crc-debug-xz6sc" Jun 06 09:36:02 crc kubenswrapper[4911]: I0606 09:36:02.049890 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-xz6sc" Jun 06 09:36:02 crc kubenswrapper[4911]: W0606 09:36:02.086140 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod603912cd_7698_4241_a9ae_927ae90862c6.slice/crio-8ab7a1c76e34c84a5d2ba4dec673a5f914e8146ec724c89fa300a1a4d4f302aa WatchSource:0}: Error finding container 8ab7a1c76e34c84a5d2ba4dec673a5f914e8146ec724c89fa300a1a4d4f302aa: Status 404 returned error can't find the container with id 8ab7a1c76e34c84a5d2ba4dec673a5f914e8146ec724c89fa300a1a4d4f302aa Jun 06 09:36:02 crc kubenswrapper[4911]: I0606 09:36:02.942469 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq"] Jun 06 09:36:02 crc kubenswrapper[4911]: I0606 09:36:02.944381 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:02 crc kubenswrapper[4911]: I0606 09:36:02.950059 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:36:02 crc kubenswrapper[4911]: I0606 09:36:02.950307 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:36:02 crc kubenswrapper[4911]: I0606 09:36:02.950512 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:36:02 crc kubenswrapper[4911]: I0606 09:36:02.950664 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:36:02 crc kubenswrapper[4911]: I0606 09:36:02.972317 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq"] Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.048295 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.048653 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.048713 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lncn\" (UniqueName: \"kubernetes.io/projected/aee7dc49-e783-4847-bdba-f4e885cd4977-kube-api-access-2lncn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.049228 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.083664 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-xz6sc" event={"ID":"603912cd-7698-4241-a9ae-927ae90862c6","Type":"ContainerStarted","Data":"54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a"} Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.083725 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-xz6sc" event={"ID":"603912cd-7698-4241-a9ae-927ae90862c6","Type":"ContainerStarted","Data":"8ab7a1c76e34c84a5d2ba4dec673a5f914e8146ec724c89fa300a1a4d4f302aa"} Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.096193 4911 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openstack/crc-debug-xz6sc" podStartSLOduration=2.096175939 podStartE2EDuration="2.096175939s" podCreationTimestamp="2025-06-06 09:36:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:36:03.095585244 +0000 UTC m=+1374.371010787" watchObservedRunningTime="2025-06-06 09:36:03.096175939 +0000 UTC m=+1374.371601482" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.151574 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.151625 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lncn\" (UniqueName: \"kubernetes.io/projected/aee7dc49-e783-4847-bdba-f4e885cd4977-kube-api-access-2lncn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.151741 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.151834 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.159774 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.159786 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.173366 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.180772 4911 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lncn\" (UniqueName: \"kubernetes.io/projected/aee7dc49-e783-4847-bdba-f4e885cd4977-kube-api-access-2lncn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.269217 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:03 crc kubenswrapper[4911]: W0606 09:36:03.817599 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaee7dc49_e783_4847_bdba_f4e885cd4977.slice/crio-8f0b5ee6fcb34818b6eacbe712830758fab678f71f319aeb8554b22da25b74c6 WatchSource:0}: Error finding container 8f0b5ee6fcb34818b6eacbe712830758fab678f71f319aeb8554b22da25b74c6: Status 404 returned error can't find the container with id 8f0b5ee6fcb34818b6eacbe712830758fab678f71f319aeb8554b22da25b74c6 Jun 06 09:36:03 crc kubenswrapper[4911]: I0606 09:36:03.819867 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq"] Jun 06 09:36:04 crc kubenswrapper[4911]: I0606 09:36:04.093566 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" event={"ID":"aee7dc49-e783-4847-bdba-f4e885cd4977","Type":"ContainerStarted","Data":"8f0b5ee6fcb34818b6eacbe712830758fab678f71f319aeb8554b22da25b74c6"} Jun 06 09:36:08 crc kubenswrapper[4911]: I0606 09:36:08.137825 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.243730 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.339708 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-xz6sc"] Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.340219 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-xz6sc" podUID="603912cd-7698-4241-a9ae-927ae90862c6" containerName="container-00" containerID="cri-o://54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a" gracePeriod=2 Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.352133 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-xz6sc"] Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.509146 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-xz6sc" Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.596992 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/603912cd-7698-4241-a9ae-927ae90862c6-host\") pod \"603912cd-7698-4241-a9ae-927ae90862c6\" (UID: \"603912cd-7698-4241-a9ae-927ae90862c6\") " Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.597257 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxhs2\" (UniqueName: \"kubernetes.io/projected/603912cd-7698-4241-a9ae-927ae90862c6-kube-api-access-qxhs2\") pod \"603912cd-7698-4241-a9ae-927ae90862c6\" (UID: \"603912cd-7698-4241-a9ae-927ae90862c6\") " Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.597398 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/603912cd-7698-4241-a9ae-927ae90862c6-host" (OuterVolumeSpecName: "host") pod "603912cd-7698-4241-a9ae-927ae90862c6" (UID: "603912cd-7698-4241-a9ae-927ae90862c6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.597696 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/603912cd-7698-4241-a9ae-927ae90862c6-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.614401 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/603912cd-7698-4241-a9ae-927ae90862c6-kube-api-access-qxhs2" (OuterVolumeSpecName: "kube-api-access-qxhs2") pod "603912cd-7698-4241-a9ae-927ae90862c6" (UID: "603912cd-7698-4241-a9ae-927ae90862c6"). InnerVolumeSpecName "kube-api-access-qxhs2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.699690 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxhs2\" (UniqueName: \"kubernetes.io/projected/603912cd-7698-4241-a9ae-927ae90862c6-kube-api-access-qxhs2\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:13 crc kubenswrapper[4911]: I0606 09:36:13.961133 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="603912cd-7698-4241-a9ae-927ae90862c6" path="/var/lib/kubelet/pods/603912cd-7698-4241-a9ae-927ae90862c6/volumes" Jun 06 09:36:14 crc kubenswrapper[4911]: I0606 09:36:14.212851 4911 generic.go:334] "Generic (PLEG): container finished" podID="603912cd-7698-4241-a9ae-927ae90862c6" containerID="54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a" exitCode=0 Jun 06 09:36:14 crc kubenswrapper[4911]: I0606 09:36:14.212909 4911 scope.go:117] "RemoveContainer" containerID="54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a" Jun 06 09:36:14 crc kubenswrapper[4911]: I0606 09:36:14.212932 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-xz6sc" Jun 06 09:36:14 crc kubenswrapper[4911]: I0606 09:36:14.214840 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" event={"ID":"aee7dc49-e783-4847-bdba-f4e885cd4977","Type":"ContainerStarted","Data":"c4e887e4e51291711d778b41fc46347eba10e06ec6640f6df91d35cb8c899170"} Jun 06 09:36:14 crc kubenswrapper[4911]: I0606 09:36:14.233075 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" podStartSLOduration=2.812146683 podStartE2EDuration="12.233059132s" podCreationTimestamp="2025-06-06 09:36:02 +0000 UTC" firstStartedPulling="2025-06-06 09:36:03.8203328 +0000 UTC m=+1375.095758343" lastFinishedPulling="2025-06-06 09:36:13.241245249 +0000 UTC m=+1384.516670792" observedRunningTime="2025-06-06 09:36:14.23181736 +0000 UTC m=+1385.507242923" watchObservedRunningTime="2025-06-06 09:36:14.233059132 +0000 UTC m=+1385.508484675" Jun 06 09:36:14 crc kubenswrapper[4911]: I0606 09:36:14.239110 4911 scope.go:117] "RemoveContainer" containerID="54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a" Jun 06 09:36:14 crc kubenswrapper[4911]: E0606 09:36:14.239555 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a\": container with ID starting with 54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a not found: ID does not exist" containerID="54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a" Jun 06 09:36:14 crc kubenswrapper[4911]: I0606 09:36:14.239585 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a"} err="failed to get container status \"54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a\": rpc error: code = NotFound desc = could not find container \"54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a\": container with ID starting with 54fa0a253c62e8319ca80502138fc0691678f4ecd231a3bec1c9dd11b6ba853a not found: ID does not exist" Jun 06 09:36:17 crc kubenswrapper[4911]: I0606 09:36:17.426251 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jun 06 09:36:17 crc kubenswrapper[4911]: I0606 09:36:17.650020 4911 scope.go:117] "RemoveContainer" containerID="04a9d8737dcf583de2631d4db21c886d7392e5b49046024ab08d7de012088b5c" Jun 06 09:36:17 crc kubenswrapper[4911]: I0606 09:36:17.682544 4911 scope.go:117] "RemoveContainer" containerID="ea065b61e819ce6b2afc0059fe4008cefcc9a3619ab6fe8dba72ce28afdb0218" Jun 06 09:36:18 crc kubenswrapper[4911]: I0606 09:36:18.139342 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.324775 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zpdb2"] Jun 06 09:36:26 crc kubenswrapper[4911]: E0606 09:36:26.326172 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="603912cd-7698-4241-a9ae-927ae90862c6" containerName="container-00" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.326189 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="603912cd-7698-4241-a9ae-927ae90862c6" containerName="container-00" Jun 06 
09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.326456 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="603912cd-7698-4241-a9ae-927ae90862c6" containerName="container-00" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.328396 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.339576 4911 generic.go:334] "Generic (PLEG): container finished" podID="aee7dc49-e783-4847-bdba-f4e885cd4977" containerID="c4e887e4e51291711d778b41fc46347eba10e06ec6640f6df91d35cb8c899170" exitCode=0 Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.340128 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" event={"ID":"aee7dc49-e783-4847-bdba-f4e885cd4977","Type":"ContainerDied","Data":"c4e887e4e51291711d778b41fc46347eba10e06ec6640f6df91d35cb8c899170"} Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.340648 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zpdb2"] Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.472736 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-catalog-content\") pod \"certified-operators-zpdb2\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.472800 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-utilities\") pod \"certified-operators-zpdb2\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.472864 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzf5k\" (UniqueName: \"kubernetes.io/projected/6326e56b-02e7-4dc7-84e3-73f752dbb614-kube-api-access-nzf5k\") pod \"certified-operators-zpdb2\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.575168 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzf5k\" (UniqueName: \"kubernetes.io/projected/6326e56b-02e7-4dc7-84e3-73f752dbb614-kube-api-access-nzf5k\") pod \"certified-operators-zpdb2\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.575357 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-catalog-content\") pod \"certified-operators-zpdb2\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.575380 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-utilities\") pod \"certified-operators-zpdb2\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " 
pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.575928 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-utilities\") pod \"certified-operators-zpdb2\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.576228 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-catalog-content\") pod \"certified-operators-zpdb2\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.600136 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzf5k\" (UniqueName: \"kubernetes.io/projected/6326e56b-02e7-4dc7-84e3-73f752dbb614-kube-api-access-nzf5k\") pod \"certified-operators-zpdb2\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:26 crc kubenswrapper[4911]: I0606 09:36:26.658772 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:27 crc kubenswrapper[4911]: I0606 09:36:27.121947 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zpdb2"] Jun 06 09:36:27 crc kubenswrapper[4911]: W0606 09:36:27.124128 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6326e56b_02e7_4dc7_84e3_73f752dbb614.slice/crio-1dbd9c842dc7268fa4ae71b3e0450dd0051cba0715111cc7b30c17d4db74bf8a WatchSource:0}: Error finding container 1dbd9c842dc7268fa4ae71b3e0450dd0051cba0715111cc7b30c17d4db74bf8a: Status 404 returned error can't find the container with id 1dbd9c842dc7268fa4ae71b3e0450dd0051cba0715111cc7b30c17d4db74bf8a Jun 06 09:36:27 crc kubenswrapper[4911]: I0606 09:36:27.354028 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpdb2" event={"ID":"6326e56b-02e7-4dc7-84e3-73f752dbb614","Type":"ContainerStarted","Data":"33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16"} Jun 06 09:36:27 crc kubenswrapper[4911]: I0606 09:36:27.354519 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpdb2" event={"ID":"6326e56b-02e7-4dc7-84e3-73f752dbb614","Type":"ContainerStarted","Data":"1dbd9c842dc7268fa4ae71b3e0450dd0051cba0715111cc7b30c17d4db74bf8a"} Jun 06 09:36:27 crc kubenswrapper[4911]: I0606 09:36:27.958501 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.009937 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-ssh-key\") pod \"aee7dc49-e783-4847-bdba-f4e885cd4977\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.046553 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aee7dc49-e783-4847-bdba-f4e885cd4977" (UID: "aee7dc49-e783-4847-bdba-f4e885cd4977"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.111832 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-inventory\") pod \"aee7dc49-e783-4847-bdba-f4e885cd4977\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.111930 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lncn\" (UniqueName: \"kubernetes.io/projected/aee7dc49-e783-4847-bdba-f4e885cd4977-kube-api-access-2lncn\") pod \"aee7dc49-e783-4847-bdba-f4e885cd4977\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.112950 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-repo-setup-combined-ca-bundle\") pod \"aee7dc49-e783-4847-bdba-f4e885cd4977\" (UID: \"aee7dc49-e783-4847-bdba-f4e885cd4977\") " Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.113702 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.116588 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "aee7dc49-e783-4847-bdba-f4e885cd4977" (UID: "aee7dc49-e783-4847-bdba-f4e885cd4977"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.117360 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aee7dc49-e783-4847-bdba-f4e885cd4977-kube-api-access-2lncn" (OuterVolumeSpecName: "kube-api-access-2lncn") pod "aee7dc49-e783-4847-bdba-f4e885cd4977" (UID: "aee7dc49-e783-4847-bdba-f4e885cd4977"). InnerVolumeSpecName "kube-api-access-2lncn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.142440 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-inventory" (OuterVolumeSpecName: "inventory") pod "aee7dc49-e783-4847-bdba-f4e885cd4977" (UID: "aee7dc49-e783-4847-bdba-f4e885cd4977"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.215176 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.215212 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lncn\" (UniqueName: \"kubernetes.io/projected/aee7dc49-e783-4847-bdba-f4e885cd4977-kube-api-access-2lncn\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.215223 4911 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aee7dc49-e783-4847-bdba-f4e885cd4977-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.370072 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.370136 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq" event={"ID":"aee7dc49-e783-4847-bdba-f4e885cd4977","Type":"ContainerDied","Data":"8f0b5ee6fcb34818b6eacbe712830758fab678f71f319aeb8554b22da25b74c6"} Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.370236 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f0b5ee6fcb34818b6eacbe712830758fab678f71f319aeb8554b22da25b74c6" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.374630 4911 generic.go:334] "Generic (PLEG): container finished" podID="6326e56b-02e7-4dc7-84e3-73f752dbb614" containerID="33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16" exitCode=0 Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.374841 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpdb2" event={"ID":"6326e56b-02e7-4dc7-84e3-73f752dbb614","Type":"ContainerDied","Data":"33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16"} Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.473825 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr"] Jun 06 09:36:28 crc kubenswrapper[4911]: E0606 09:36:28.474833 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee7dc49-e783-4847-bdba-f4e885cd4977" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.474863 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee7dc49-e783-4847-bdba-f4e885cd4977" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.475226 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="aee7dc49-e783-4847-bdba-f4e885cd4977" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.476201 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.478411 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.478438 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.479320 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.483356 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.483564 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr"] Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.624456 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-7llkr\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.625028 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rsqt\" (UniqueName: \"kubernetes.io/projected/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-kube-api-access-7rsqt\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-7llkr\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.625327 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-7llkr\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.726712 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rsqt\" (UniqueName: \"kubernetes.io/projected/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-kube-api-access-7rsqt\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-7llkr\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.726755 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-7llkr\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.726890 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-7llkr\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.731189 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-7llkr\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.731316 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-7llkr\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.747211 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rsqt\" (UniqueName: \"kubernetes.io/projected/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-kube-api-access-7rsqt\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-7llkr\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:28 crc kubenswrapper[4911]: I0606 09:36:28.791747 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:29 crc kubenswrapper[4911]: I0606 09:36:29.284433 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr"] Jun 06 09:36:29 crc kubenswrapper[4911]: I0606 09:36:29.393135 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" event={"ID":"29c902ea-8d52-4a94-a4a8-7479bd7ee53a","Type":"ContainerStarted","Data":"104eeb57e2ca16244579520b8cfdcf2c8c2032c905bb861b23321b143a153261"} Jun 06 09:36:30 crc kubenswrapper[4911]: I0606 09:36:30.406080 4911 generic.go:334] "Generic (PLEG): container finished" podID="6326e56b-02e7-4dc7-84e3-73f752dbb614" containerID="ae3ee7a2a70a7b5f7b614457103f76cf9c4dbd96ec98e10beb11eba3f48efb4e" exitCode=0 Jun 06 09:36:30 crc kubenswrapper[4911]: I0606 09:36:30.406168 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpdb2" event={"ID":"6326e56b-02e7-4dc7-84e3-73f752dbb614","Type":"ContainerDied","Data":"ae3ee7a2a70a7b5f7b614457103f76cf9c4dbd96ec98e10beb11eba3f48efb4e"} Jun 06 09:36:31 crc kubenswrapper[4911]: I0606 09:36:31.427692 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpdb2" event={"ID":"6326e56b-02e7-4dc7-84e3-73f752dbb614","Type":"ContainerStarted","Data":"fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad"} Jun 06 09:36:31 crc kubenswrapper[4911]: I0606 09:36:31.429257 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" event={"ID":"29c902ea-8d52-4a94-a4a8-7479bd7ee53a","Type":"ContainerStarted","Data":"a6f013a50b5d14755d50543c22df49117fdd1ff124971ad6ad27ff60140bc92a"} Jun 06 09:36:31 crc kubenswrapper[4911]: I0606 09:36:31.453613 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zpdb2" podStartSLOduration=2.874933707 podStartE2EDuration="5.453595608s" 
podCreationTimestamp="2025-06-06 09:36:26 +0000 UTC" firstStartedPulling="2025-06-06 09:36:28.377792743 +0000 UTC m=+1399.653218286" lastFinishedPulling="2025-06-06 09:36:30.956454644 +0000 UTC m=+1402.231880187" observedRunningTime="2025-06-06 09:36:31.445007418 +0000 UTC m=+1402.720432981" watchObservedRunningTime="2025-06-06 09:36:31.453595608 +0000 UTC m=+1402.729021151" Jun 06 09:36:31 crc kubenswrapper[4911]: I0606 09:36:31.467040 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" podStartSLOduration=2.4586050520000002 podStartE2EDuration="3.467016724s" podCreationTimestamp="2025-06-06 09:36:28 +0000 UTC" firstStartedPulling="2025-06-06 09:36:29.291645933 +0000 UTC m=+1400.567071476" lastFinishedPulling="2025-06-06 09:36:30.300057605 +0000 UTC m=+1401.575483148" observedRunningTime="2025-06-06 09:36:31.458238578 +0000 UTC m=+1402.733664131" watchObservedRunningTime="2025-06-06 09:36:31.467016724 +0000 UTC m=+1402.742442257" Jun 06 09:36:34 crc kubenswrapper[4911]: I0606 09:36:34.459827 4911 generic.go:334] "Generic (PLEG): container finished" podID="29c902ea-8d52-4a94-a4a8-7479bd7ee53a" containerID="a6f013a50b5d14755d50543c22df49117fdd1ff124971ad6ad27ff60140bc92a" exitCode=0 Jun 06 09:36:34 crc kubenswrapper[4911]: I0606 09:36:34.459934 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" event={"ID":"29c902ea-8d52-4a94-a4a8-7479bd7ee53a","Type":"ContainerDied","Data":"a6f013a50b5d14755d50543c22df49117fdd1ff124971ad6ad27ff60140bc92a"} Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.013775 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.134958 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-ssh-key\") pod \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.135031 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-inventory\") pod \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.135067 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rsqt\" (UniqueName: \"kubernetes.io/projected/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-kube-api-access-7rsqt\") pod \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\" (UID: \"29c902ea-8d52-4a94-a4a8-7479bd7ee53a\") " Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.143904 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-kube-api-access-7rsqt" (OuterVolumeSpecName: "kube-api-access-7rsqt") pod "29c902ea-8d52-4a94-a4a8-7479bd7ee53a" (UID: "29c902ea-8d52-4a94-a4a8-7479bd7ee53a"). InnerVolumeSpecName "kube-api-access-7rsqt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.166412 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "29c902ea-8d52-4a94-a4a8-7479bd7ee53a" (UID: "29c902ea-8d52-4a94-a4a8-7479bd7ee53a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.168388 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-inventory" (OuterVolumeSpecName: "inventory") pod "29c902ea-8d52-4a94-a4a8-7479bd7ee53a" (UID: "29c902ea-8d52-4a94-a4a8-7479bd7ee53a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.238070 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.238128 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.238140 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rsqt\" (UniqueName: \"kubernetes.io/projected/29c902ea-8d52-4a94-a4a8-7479bd7ee53a-kube-api-access-7rsqt\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.483600 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" event={"ID":"29c902ea-8d52-4a94-a4a8-7479bd7ee53a","Type":"ContainerDied","Data":"104eeb57e2ca16244579520b8cfdcf2c8c2032c905bb861b23321b143a153261"} Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.483643 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="104eeb57e2ca16244579520b8cfdcf2c8c2032c905bb861b23321b143a153261" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.483699 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-7llkr" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.557266 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24"] Jun 06 09:36:36 crc kubenswrapper[4911]: E0606 09:36:36.557978 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c902ea-8d52-4a94-a4a8-7479bd7ee53a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.558000 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c902ea-8d52-4a94-a4a8-7479bd7ee53a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.558279 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="29c902ea-8d52-4a94-a4a8-7479bd7ee53a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.559235 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.561611 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.561746 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.561775 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.562013 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.564145 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24"] Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.659565 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.659624 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.711619 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.746525 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.746749 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.747240 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.747319 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55cf5\" (UniqueName: \"kubernetes.io/projected/a4c28cde-8e9e-469d-9960-ea174038d9ef-kube-api-access-55cf5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.849167 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.849293 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.849323 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55cf5\" (UniqueName: \"kubernetes.io/projected/a4c28cde-8e9e-469d-9960-ea174038d9ef-kube-api-access-55cf5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.849368 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.853683 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.865158 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.868224 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.868402 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55cf5\" (UniqueName: \"kubernetes.io/projected/a4c28cde-8e9e-469d-9960-ea174038d9ef-kube-api-access-55cf5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-78t24\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:36 crc kubenswrapper[4911]: I0606 09:36:36.885025 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:36:37 crc kubenswrapper[4911]: I0606 09:36:37.446013 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24"] Jun 06 09:36:37 crc kubenswrapper[4911]: I0606 09:36:37.498916 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" event={"ID":"a4c28cde-8e9e-469d-9960-ea174038d9ef","Type":"ContainerStarted","Data":"efa1069adc984951f69a04dbcba62d67d02641496b532733a2a1dcb0512b979d"} Jun 06 09:36:37 crc kubenswrapper[4911]: I0606 09:36:37.549208 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:37 crc kubenswrapper[4911]: I0606 09:36:37.605135 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zpdb2"] Jun 06 09:36:38 crc kubenswrapper[4911]: I0606 09:36:38.510385 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" event={"ID":"a4c28cde-8e9e-469d-9960-ea174038d9ef","Type":"ContainerStarted","Data":"51697baf9d3e2defa06249ed88ef7f15193abe0e373420c8d9a3e8c10b088b44"} Jun 06 09:36:38 crc kubenswrapper[4911]: I0606 09:36:38.529679 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" podStartSLOduration=2.138632995 podStartE2EDuration="2.52966126s" podCreationTimestamp="2025-06-06 09:36:36 +0000 UTC" firstStartedPulling="2025-06-06 09:36:37.451795282 +0000 UTC m=+1408.727220835" lastFinishedPulling="2025-06-06 09:36:37.842823557 +0000 UTC m=+1409.118249100" observedRunningTime="2025-06-06 09:36:38.527976736 +0000 UTC m=+1409.803402289" watchObservedRunningTime="2025-06-06 09:36:38.52966126 +0000 UTC m=+1409.805086803" Jun 06 09:36:39 crc kubenswrapper[4911]: I0606 09:36:39.522417 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zpdb2" podUID="6326e56b-02e7-4dc7-84e3-73f752dbb614" containerName="registry-server" containerID="cri-o://fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad" gracePeriod=2 Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.103997 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.229987 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-catalog-content\") pod \"6326e56b-02e7-4dc7-84e3-73f752dbb614\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.230025 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-utilities\") pod \"6326e56b-02e7-4dc7-84e3-73f752dbb614\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.230317 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzf5k\" (UniqueName: \"kubernetes.io/projected/6326e56b-02e7-4dc7-84e3-73f752dbb614-kube-api-access-nzf5k\") pod \"6326e56b-02e7-4dc7-84e3-73f752dbb614\" (UID: \"6326e56b-02e7-4dc7-84e3-73f752dbb614\") " Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.231010 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-utilities" (OuterVolumeSpecName: "utilities") pod "6326e56b-02e7-4dc7-84e3-73f752dbb614" (UID: "6326e56b-02e7-4dc7-84e3-73f752dbb614"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.235455 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6326e56b-02e7-4dc7-84e3-73f752dbb614-kube-api-access-nzf5k" (OuterVolumeSpecName: "kube-api-access-nzf5k") pod "6326e56b-02e7-4dc7-84e3-73f752dbb614" (UID: "6326e56b-02e7-4dc7-84e3-73f752dbb614"). InnerVolumeSpecName "kube-api-access-nzf5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.332876 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzf5k\" (UniqueName: \"kubernetes.io/projected/6326e56b-02e7-4dc7-84e3-73f752dbb614-kube-api-access-nzf5k\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.332919 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.516166 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6326e56b-02e7-4dc7-84e3-73f752dbb614" (UID: "6326e56b-02e7-4dc7-84e3-73f752dbb614"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.536445 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6326e56b-02e7-4dc7-84e3-73f752dbb614-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.537014 4911 generic.go:334] "Generic (PLEG): container finished" podID="6326e56b-02e7-4dc7-84e3-73f752dbb614" containerID="fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad" exitCode=0 Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.537065 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpdb2" event={"ID":"6326e56b-02e7-4dc7-84e3-73f752dbb614","Type":"ContainerDied","Data":"fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad"} Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.537118 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zpdb2" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.537153 4911 scope.go:117] "RemoveContainer" containerID="fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.537135 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zpdb2" event={"ID":"6326e56b-02e7-4dc7-84e3-73f752dbb614","Type":"ContainerDied","Data":"1dbd9c842dc7268fa4ae71b3e0450dd0051cba0715111cc7b30c17d4db74bf8a"} Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.573492 4911 scope.go:117] "RemoveContainer" containerID="ae3ee7a2a70a7b5f7b614457103f76cf9c4dbd96ec98e10beb11eba3f48efb4e" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.576672 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zpdb2"] Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.586703 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zpdb2"] Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.595559 4911 scope.go:117] "RemoveContainer" containerID="33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.649981 4911 scope.go:117] "RemoveContainer" containerID="fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad" Jun 06 09:36:40 crc kubenswrapper[4911]: E0606 09:36:40.650517 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad\": container with ID starting with fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad not found: ID does not exist" containerID="fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.650630 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad"} err="failed to get container status \"fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad\": rpc error: code = NotFound desc = could not find container \"fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad\": container with ID starting with fa3f152f62d5e41c54d7a499dde4e157ad1b8de9570cc11c96571045844025ad not found: ID does not exist" Jun 06 
09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.650769 4911 scope.go:117] "RemoveContainer" containerID="ae3ee7a2a70a7b5f7b614457103f76cf9c4dbd96ec98e10beb11eba3f48efb4e" Jun 06 09:36:40 crc kubenswrapper[4911]: E0606 09:36:40.651333 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae3ee7a2a70a7b5f7b614457103f76cf9c4dbd96ec98e10beb11eba3f48efb4e\": container with ID starting with ae3ee7a2a70a7b5f7b614457103f76cf9c4dbd96ec98e10beb11eba3f48efb4e not found: ID does not exist" containerID="ae3ee7a2a70a7b5f7b614457103f76cf9c4dbd96ec98e10beb11eba3f48efb4e" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.651375 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae3ee7a2a70a7b5f7b614457103f76cf9c4dbd96ec98e10beb11eba3f48efb4e"} err="failed to get container status \"ae3ee7a2a70a7b5f7b614457103f76cf9c4dbd96ec98e10beb11eba3f48efb4e\": rpc error: code = NotFound desc = could not find container \"ae3ee7a2a70a7b5f7b614457103f76cf9c4dbd96ec98e10beb11eba3f48efb4e\": container with ID starting with ae3ee7a2a70a7b5f7b614457103f76cf9c4dbd96ec98e10beb11eba3f48efb4e not found: ID does not exist" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.651404 4911 scope.go:117] "RemoveContainer" containerID="33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16" Jun 06 09:36:40 crc kubenswrapper[4911]: E0606 09:36:40.651860 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16\": container with ID starting with 33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16 not found: ID does not exist" containerID="33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16" Jun 06 09:36:40 crc kubenswrapper[4911]: I0606 09:36:40.651889 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16"} err="failed to get container status \"33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16\": rpc error: code = NotFound desc = could not find container \"33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16\": container with ID starting with 33a0422148cb278730d972bfe7593d55b1805c0610b13efa37da0501173c6f16 not found: ID does not exist" Jun 06 09:36:41 crc kubenswrapper[4911]: I0606 09:36:41.962146 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6326e56b-02e7-4dc7-84e3-73f752dbb614" path="/var/lib/kubelet/pods/6326e56b-02e7-4dc7-84e3-73f752dbb614/volumes" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.717848 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-m6tds"] Jun 06 09:37:01 crc kubenswrapper[4911]: E0606 09:37:01.718650 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6326e56b-02e7-4dc7-84e3-73f752dbb614" containerName="extract-utilities" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.718663 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6326e56b-02e7-4dc7-84e3-73f752dbb614" containerName="extract-utilities" Jun 06 09:37:01 crc kubenswrapper[4911]: E0606 09:37:01.718707 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6326e56b-02e7-4dc7-84e3-73f752dbb614" containerName="registry-server" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.718713 4911 
state_mem.go:107] "Deleted CPUSet assignment" podUID="6326e56b-02e7-4dc7-84e3-73f752dbb614" containerName="registry-server" Jun 06 09:37:01 crc kubenswrapper[4911]: E0606 09:37:01.718729 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6326e56b-02e7-4dc7-84e3-73f752dbb614" containerName="extract-content" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.718735 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6326e56b-02e7-4dc7-84e3-73f752dbb614" containerName="extract-content" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.718911 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6326e56b-02e7-4dc7-84e3-73f752dbb614" containerName="registry-server" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.719544 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-m6tds" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.819752 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fhdd\" (UniqueName: \"kubernetes.io/projected/50ef67b8-5ad6-49ac-90df-3302fb57fd08-kube-api-access-6fhdd\") pod \"crc-debug-m6tds\" (UID: \"50ef67b8-5ad6-49ac-90df-3302fb57fd08\") " pod="openstack/crc-debug-m6tds" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.819866 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50ef67b8-5ad6-49ac-90df-3302fb57fd08-host\") pod \"crc-debug-m6tds\" (UID: \"50ef67b8-5ad6-49ac-90df-3302fb57fd08\") " pod="openstack/crc-debug-m6tds" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.921925 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fhdd\" (UniqueName: \"kubernetes.io/projected/50ef67b8-5ad6-49ac-90df-3302fb57fd08-kube-api-access-6fhdd\") pod \"crc-debug-m6tds\" (UID: \"50ef67b8-5ad6-49ac-90df-3302fb57fd08\") " pod="openstack/crc-debug-m6tds" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.922025 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50ef67b8-5ad6-49ac-90df-3302fb57fd08-host\") pod \"crc-debug-m6tds\" (UID: \"50ef67b8-5ad6-49ac-90df-3302fb57fd08\") " pod="openstack/crc-debug-m6tds" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.922266 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50ef67b8-5ad6-49ac-90df-3302fb57fd08-host\") pod \"crc-debug-m6tds\" (UID: \"50ef67b8-5ad6-49ac-90df-3302fb57fd08\") " pod="openstack/crc-debug-m6tds" Jun 06 09:37:01 crc kubenswrapper[4911]: I0606 09:37:01.942341 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fhdd\" (UniqueName: \"kubernetes.io/projected/50ef67b8-5ad6-49ac-90df-3302fb57fd08-kube-api-access-6fhdd\") pod \"crc-debug-m6tds\" (UID: \"50ef67b8-5ad6-49ac-90df-3302fb57fd08\") " pod="openstack/crc-debug-m6tds" Jun 06 09:37:02 crc kubenswrapper[4911]: I0606 09:37:02.043644 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-m6tds" Jun 06 09:37:02 crc kubenswrapper[4911]: I0606 09:37:02.748717 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-m6tds" event={"ID":"50ef67b8-5ad6-49ac-90df-3302fb57fd08","Type":"ContainerStarted","Data":"145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67"} Jun 06 09:37:02 crc kubenswrapper[4911]: I0606 09:37:02.749255 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-m6tds" event={"ID":"50ef67b8-5ad6-49ac-90df-3302fb57fd08","Type":"ContainerStarted","Data":"007ab32cd2bf572465cfe2e248aacaea04125191a7e0638ce30461363d172881"} Jun 06 09:37:02 crc kubenswrapper[4911]: I0606 09:37:02.763267 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-m6tds" podStartSLOduration=1.763250395 podStartE2EDuration="1.763250395s" podCreationTimestamp="2025-06-06 09:37:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:37:02.760914185 +0000 UTC m=+1434.036339728" watchObservedRunningTime="2025-06-06 09:37:02.763250395 +0000 UTC m=+1434.038675938" Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.631438 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-m6tds"] Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.632241 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-m6tds" podUID="50ef67b8-5ad6-49ac-90df-3302fb57fd08" containerName="container-00" containerID="cri-o://145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67" gracePeriod=2 Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.640754 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-m6tds"] Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.742438 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-m6tds" Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.849648 4911 generic.go:334] "Generic (PLEG): container finished" podID="50ef67b8-5ad6-49ac-90df-3302fb57fd08" containerID="145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67" exitCode=0 Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.849710 4911 scope.go:117] "RemoveContainer" containerID="145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67" Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.849858 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-m6tds" Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.874430 4911 scope.go:117] "RemoveContainer" containerID="145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67" Jun 06 09:37:12 crc kubenswrapper[4911]: E0606 09:37:12.874921 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67\": container with ID starting with 145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67 not found: ID does not exist" containerID="145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67" Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.874975 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67"} err="failed to get container status \"145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67\": rpc error: code = NotFound desc = could not find container \"145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67\": container with ID starting with 145773726d5bb5afc785bf63c620483c079388592775a5d1ba017c45b334ea67 not found: ID does not exist" Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.890908 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50ef67b8-5ad6-49ac-90df-3302fb57fd08-host\") pod \"50ef67b8-5ad6-49ac-90df-3302fb57fd08\" (UID: \"50ef67b8-5ad6-49ac-90df-3302fb57fd08\") " Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.891192 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fhdd\" (UniqueName: \"kubernetes.io/projected/50ef67b8-5ad6-49ac-90df-3302fb57fd08-kube-api-access-6fhdd\") pod \"50ef67b8-5ad6-49ac-90df-3302fb57fd08\" (UID: \"50ef67b8-5ad6-49ac-90df-3302fb57fd08\") " Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.891017 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50ef67b8-5ad6-49ac-90df-3302fb57fd08-host" (OuterVolumeSpecName: "host") pod "50ef67b8-5ad6-49ac-90df-3302fb57fd08" (UID: "50ef67b8-5ad6-49ac-90df-3302fb57fd08"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.892289 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50ef67b8-5ad6-49ac-90df-3302fb57fd08-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.897384 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50ef67b8-5ad6-49ac-90df-3302fb57fd08-kube-api-access-6fhdd" (OuterVolumeSpecName: "kube-api-access-6fhdd") pod "50ef67b8-5ad6-49ac-90df-3302fb57fd08" (UID: "50ef67b8-5ad6-49ac-90df-3302fb57fd08"). InnerVolumeSpecName "kube-api-access-6fhdd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:37:12 crc kubenswrapper[4911]: I0606 09:37:12.995880 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fhdd\" (UniqueName: \"kubernetes.io/projected/50ef67b8-5ad6-49ac-90df-3302fb57fd08-kube-api-access-6fhdd\") on node \"crc\" DevicePath \"\"" Jun 06 09:37:13 crc kubenswrapper[4911]: I0606 09:37:13.961885 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50ef67b8-5ad6-49ac-90df-3302fb57fd08" path="/var/lib/kubelet/pods/50ef67b8-5ad6-49ac-90df-3302fb57fd08/volumes" Jun 06 09:37:24 crc kubenswrapper[4911]: I0606 09:37:24.299863 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:37:24 crc kubenswrapper[4911]: I0606 09:37:24.300524 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:37:54 crc kubenswrapper[4911]: I0606 09:37:54.300691 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:37:54 crc kubenswrapper[4911]: I0606 09:37:54.301711 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:38:01 crc kubenswrapper[4911]: I0606 09:38:01.917681 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-94x27"] Jun 06 09:38:01 crc kubenswrapper[4911]: E0606 09:38:01.918704 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50ef67b8-5ad6-49ac-90df-3302fb57fd08" containerName="container-00" Jun 06 09:38:01 crc kubenswrapper[4911]: I0606 09:38:01.918722 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="50ef67b8-5ad6-49ac-90df-3302fb57fd08" containerName="container-00" Jun 06 09:38:01 crc kubenswrapper[4911]: I0606 09:38:01.918970 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="50ef67b8-5ad6-49ac-90df-3302fb57fd08" containerName="container-00" Jun 06 09:38:01 crc kubenswrapper[4911]: I0606 09:38:01.919744 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-94x27" Jun 06 09:38:02 crc kubenswrapper[4911]: I0606 09:38:02.099184 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9676f662-0c0e-4d78-991b-1fb5418cde1a-host\") pod \"crc-debug-94x27\" (UID: \"9676f662-0c0e-4d78-991b-1fb5418cde1a\") " pod="openstack/crc-debug-94x27" Jun 06 09:38:02 crc kubenswrapper[4911]: I0606 09:38:02.099328 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpnj9\" (UniqueName: \"kubernetes.io/projected/9676f662-0c0e-4d78-991b-1fb5418cde1a-kube-api-access-xpnj9\") pod \"crc-debug-94x27\" (UID: \"9676f662-0c0e-4d78-991b-1fb5418cde1a\") " pod="openstack/crc-debug-94x27" Jun 06 09:38:02 crc kubenswrapper[4911]: I0606 09:38:02.202807 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9676f662-0c0e-4d78-991b-1fb5418cde1a-host\") pod \"crc-debug-94x27\" (UID: \"9676f662-0c0e-4d78-991b-1fb5418cde1a\") " pod="openstack/crc-debug-94x27" Jun 06 09:38:02 crc kubenswrapper[4911]: I0606 09:38:02.203075 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpnj9\" (UniqueName: \"kubernetes.io/projected/9676f662-0c0e-4d78-991b-1fb5418cde1a-kube-api-access-xpnj9\") pod \"crc-debug-94x27\" (UID: \"9676f662-0c0e-4d78-991b-1fb5418cde1a\") " pod="openstack/crc-debug-94x27" Jun 06 09:38:02 crc kubenswrapper[4911]: I0606 09:38:02.203975 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9676f662-0c0e-4d78-991b-1fb5418cde1a-host\") pod \"crc-debug-94x27\" (UID: \"9676f662-0c0e-4d78-991b-1fb5418cde1a\") " pod="openstack/crc-debug-94x27" Jun 06 09:38:02 crc kubenswrapper[4911]: I0606 09:38:02.233448 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpnj9\" (UniqueName: \"kubernetes.io/projected/9676f662-0c0e-4d78-991b-1fb5418cde1a-kube-api-access-xpnj9\") pod \"crc-debug-94x27\" (UID: \"9676f662-0c0e-4d78-991b-1fb5418cde1a\") " pod="openstack/crc-debug-94x27" Jun 06 09:38:02 crc kubenswrapper[4911]: I0606 09:38:02.244693 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-94x27" Jun 06 09:38:02 crc kubenswrapper[4911]: I0606 09:38:02.318429 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-94x27" event={"ID":"9676f662-0c0e-4d78-991b-1fb5418cde1a","Type":"ContainerStarted","Data":"d1280c6a2626f78c474c6bbe0d74fee60d73eb6cf3df8b86a7eb52d6a2083d22"} Jun 06 09:38:03 crc kubenswrapper[4911]: I0606 09:38:03.329639 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-94x27" event={"ID":"9676f662-0c0e-4d78-991b-1fb5418cde1a","Type":"ContainerStarted","Data":"8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88"} Jun 06 09:38:03 crc kubenswrapper[4911]: I0606 09:38:03.361648 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-94x27" podStartSLOduration=2.361628636 podStartE2EDuration="2.361628636s" podCreationTimestamp="2025-06-06 09:38:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:38:03.343164001 +0000 UTC m=+1494.618589564" watchObservedRunningTime="2025-06-06 09:38:03.361628636 +0000 UTC m=+1494.637054179" Jun 06 09:38:12 crc kubenswrapper[4911]: I0606 09:38:12.809072 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-94x27"] Jun 06 09:38:12 crc kubenswrapper[4911]: I0606 09:38:12.809847 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-94x27" podUID="9676f662-0c0e-4d78-991b-1fb5418cde1a" containerName="container-00" containerID="cri-o://8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88" gracePeriod=2 Jun 06 09:38:12 crc kubenswrapper[4911]: I0606 09:38:12.817862 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-94x27"] Jun 06 09:38:12 crc kubenswrapper[4911]: I0606 09:38:12.937516 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-94x27" Jun 06 09:38:12 crc kubenswrapper[4911]: I0606 09:38:12.937971 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpnj9\" (UniqueName: \"kubernetes.io/projected/9676f662-0c0e-4d78-991b-1fb5418cde1a-kube-api-access-xpnj9\") pod \"9676f662-0c0e-4d78-991b-1fb5418cde1a\" (UID: \"9676f662-0c0e-4d78-991b-1fb5418cde1a\") " Jun 06 09:38:12 crc kubenswrapper[4911]: I0606 09:38:12.944121 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9676f662-0c0e-4d78-991b-1fb5418cde1a-kube-api-access-xpnj9" (OuterVolumeSpecName: "kube-api-access-xpnj9") pod "9676f662-0c0e-4d78-991b-1fb5418cde1a" (UID: "9676f662-0c0e-4d78-991b-1fb5418cde1a"). InnerVolumeSpecName "kube-api-access-xpnj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:38:13 crc kubenswrapper[4911]: I0606 09:38:13.039782 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9676f662-0c0e-4d78-991b-1fb5418cde1a-host\") pod \"9676f662-0c0e-4d78-991b-1fb5418cde1a\" (UID: \"9676f662-0c0e-4d78-991b-1fb5418cde1a\") " Jun 06 09:38:13 crc kubenswrapper[4911]: I0606 09:38:13.039917 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9676f662-0c0e-4d78-991b-1fb5418cde1a-host" (OuterVolumeSpecName: "host") pod "9676f662-0c0e-4d78-991b-1fb5418cde1a" (UID: "9676f662-0c0e-4d78-991b-1fb5418cde1a"). 
InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:38:13 crc kubenswrapper[4911]: I0606 09:38:13.040629 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpnj9\" (UniqueName: \"kubernetes.io/projected/9676f662-0c0e-4d78-991b-1fb5418cde1a-kube-api-access-xpnj9\") on node \"crc\" DevicePath \"\"" Jun 06 09:38:13 crc kubenswrapper[4911]: I0606 09:38:13.040648 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9676f662-0c0e-4d78-991b-1fb5418cde1a-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:38:13 crc kubenswrapper[4911]: I0606 09:38:13.427811 4911 generic.go:334] "Generic (PLEG): container finished" podID="9676f662-0c0e-4d78-991b-1fb5418cde1a" containerID="8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88" exitCode=0 Jun 06 09:38:13 crc kubenswrapper[4911]: I0606 09:38:13.427866 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-94x27" Jun 06 09:38:13 crc kubenswrapper[4911]: I0606 09:38:13.427870 4911 scope.go:117] "RemoveContainer" containerID="8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88" Jun 06 09:38:13 crc kubenswrapper[4911]: I0606 09:38:13.450753 4911 scope.go:117] "RemoveContainer" containerID="8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88" Jun 06 09:38:13 crc kubenswrapper[4911]: E0606 09:38:13.451271 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88\": container with ID starting with 8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88 not found: ID does not exist" containerID="8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88" Jun 06 09:38:13 crc kubenswrapper[4911]: I0606 09:38:13.451321 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88"} err="failed to get container status \"8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88\": rpc error: code = NotFound desc = could not find container \"8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88\": container with ID starting with 8fca26de8da2e3ec8295fcf26b57e707fa172d42dbc599ac665c77b7f1554c88 not found: ID does not exist" Jun 06 09:38:13 crc kubenswrapper[4911]: I0606 09:38:13.962803 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9676f662-0c0e-4d78-991b-1fb5418cde1a" path="/var/lib/kubelet/pods/9676f662-0c0e-4d78-991b-1fb5418cde1a/volumes" Jun 06 09:38:24 crc kubenswrapper[4911]: I0606 09:38:24.300662 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:38:24 crc kubenswrapper[4911]: I0606 09:38:24.301224 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:38:24 crc kubenswrapper[4911]: I0606 09:38:24.301272 4911 kubelet.go:2542] "SyncLoop 
(probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:38:24 crc kubenswrapper[4911]: I0606 09:38:24.302079 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 09:38:24 crc kubenswrapper[4911]: I0606 09:38:24.302165 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" gracePeriod=600 Jun 06 09:38:24 crc kubenswrapper[4911]: E0606 09:38:24.423997 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:38:24 crc kubenswrapper[4911]: I0606 09:38:24.538028 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" exitCode=0 Jun 06 09:38:24 crc kubenswrapper[4911]: I0606 09:38:24.538120 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1"} Jun 06 09:38:24 crc kubenswrapper[4911]: I0606 09:38:24.538169 4911 scope.go:117] "RemoveContainer" containerID="b85be7d4abe7e6126686c6349bcc9d33572e190ba6f10a48055108480e2a3749" Jun 06 09:38:24 crc kubenswrapper[4911]: I0606 09:38:24.539016 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:38:24 crc kubenswrapper[4911]: E0606 09:38:24.539464 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:38:36 crc kubenswrapper[4911]: I0606 09:38:36.948520 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:38:36 crc kubenswrapper[4911]: E0606 09:38:36.949299 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 
09:38:50 crc kubenswrapper[4911]: I0606 09:38:50.947854 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:38:50 crc kubenswrapper[4911]: E0606 09:38:50.948577 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:39:01 crc kubenswrapper[4911]: I0606 09:39:01.948864 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:39:01 crc kubenswrapper[4911]: E0606 09:39:01.949653 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.135646 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-ztxxg"] Jun 06 09:39:02 crc kubenswrapper[4911]: E0606 09:39:02.136104 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9676f662-0c0e-4d78-991b-1fb5418cde1a" containerName="container-00" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.136125 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9676f662-0c0e-4d78-991b-1fb5418cde1a" containerName="container-00" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.136339 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9676f662-0c0e-4d78-991b-1fb5418cde1a" containerName="container-00" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.137011 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-ztxxg" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.310844 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a441c0d9-153a-4447-95c8-d1749aa689b6-host\") pod \"crc-debug-ztxxg\" (UID: \"a441c0d9-153a-4447-95c8-d1749aa689b6\") " pod="openstack/crc-debug-ztxxg" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.310937 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99l7j\" (UniqueName: \"kubernetes.io/projected/a441c0d9-153a-4447-95c8-d1749aa689b6-kube-api-access-99l7j\") pod \"crc-debug-ztxxg\" (UID: \"a441c0d9-153a-4447-95c8-d1749aa689b6\") " pod="openstack/crc-debug-ztxxg" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.413450 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a441c0d9-153a-4447-95c8-d1749aa689b6-host\") pod \"crc-debug-ztxxg\" (UID: \"a441c0d9-153a-4447-95c8-d1749aa689b6\") " pod="openstack/crc-debug-ztxxg" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.413585 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99l7j\" (UniqueName: \"kubernetes.io/projected/a441c0d9-153a-4447-95c8-d1749aa689b6-kube-api-access-99l7j\") pod \"crc-debug-ztxxg\" (UID: \"a441c0d9-153a-4447-95c8-d1749aa689b6\") " pod="openstack/crc-debug-ztxxg" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.413609 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a441c0d9-153a-4447-95c8-d1749aa689b6-host\") pod \"crc-debug-ztxxg\" (UID: \"a441c0d9-153a-4447-95c8-d1749aa689b6\") " pod="openstack/crc-debug-ztxxg" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.433567 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99l7j\" (UniqueName: \"kubernetes.io/projected/a441c0d9-153a-4447-95c8-d1749aa689b6-kube-api-access-99l7j\") pod \"crc-debug-ztxxg\" (UID: \"a441c0d9-153a-4447-95c8-d1749aa689b6\") " pod="openstack/crc-debug-ztxxg" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.468866 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-ztxxg" Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.913087 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-ztxxg" event={"ID":"a441c0d9-153a-4447-95c8-d1749aa689b6","Type":"ContainerStarted","Data":"619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506"} Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.913650 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-ztxxg" event={"ID":"a441c0d9-153a-4447-95c8-d1749aa689b6","Type":"ContainerStarted","Data":"dcea2c1690e3caaf5d90fd915671d96352f17bbdcd03f7b9479720b0920365d2"} Jun 06 09:39:02 crc kubenswrapper[4911]: I0606 09:39:02.937709 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-ztxxg" podStartSLOduration=0.937684106 podStartE2EDuration="937.684106ms" podCreationTimestamp="2025-06-06 09:39:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:39:02.926524589 +0000 UTC m=+1554.201950142" watchObservedRunningTime="2025-06-06 09:39:02.937684106 +0000 UTC m=+1554.213109649" Jun 06 09:39:13 crc kubenswrapper[4911]: I0606 09:39:13.123663 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-ztxxg"] Jun 06 09:39:13 crc kubenswrapper[4911]: I0606 09:39:13.125015 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-ztxxg" podUID="a441c0d9-153a-4447-95c8-d1749aa689b6" containerName="container-00" containerID="cri-o://619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506" gracePeriod=2 Jun 06 09:39:13 crc kubenswrapper[4911]: I0606 09:39:13.134192 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-ztxxg"] Jun 06 09:39:13 crc kubenswrapper[4911]: I0606 09:39:13.208882 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-ztxxg" Jun 06 09:39:13 crc kubenswrapper[4911]: I0606 09:39:13.346164 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99l7j\" (UniqueName: \"kubernetes.io/projected/a441c0d9-153a-4447-95c8-d1749aa689b6-kube-api-access-99l7j\") pod \"a441c0d9-153a-4447-95c8-d1749aa689b6\" (UID: \"a441c0d9-153a-4447-95c8-d1749aa689b6\") " Jun 06 09:39:13 crc kubenswrapper[4911]: I0606 09:39:13.346729 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a441c0d9-153a-4447-95c8-d1749aa689b6-host\") pod \"a441c0d9-153a-4447-95c8-d1749aa689b6\" (UID: \"a441c0d9-153a-4447-95c8-d1749aa689b6\") " Jun 06 09:39:13 crc kubenswrapper[4911]: I0606 09:39:13.346932 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a441c0d9-153a-4447-95c8-d1749aa689b6-host" (OuterVolumeSpecName: "host") pod "a441c0d9-153a-4447-95c8-d1749aa689b6" (UID: "a441c0d9-153a-4447-95c8-d1749aa689b6"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:39:13 crc kubenswrapper[4911]: I0606 09:39:13.347532 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a441c0d9-153a-4447-95c8-d1749aa689b6-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:39:13 crc kubenswrapper[4911]: I0606 09:39:13.353199 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a441c0d9-153a-4447-95c8-d1749aa689b6-kube-api-access-99l7j" (OuterVolumeSpecName: "kube-api-access-99l7j") pod "a441c0d9-153a-4447-95c8-d1749aa689b6" (UID: "a441c0d9-153a-4447-95c8-d1749aa689b6"). InnerVolumeSpecName "kube-api-access-99l7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:39:13 crc kubenswrapper[4911]: I0606 09:39:13.449051 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99l7j\" (UniqueName: \"kubernetes.io/projected/a441c0d9-153a-4447-95c8-d1749aa689b6-kube-api-access-99l7j\") on node \"crc\" DevicePath \"\"" Jun 06 09:39:13 crc kubenswrapper[4911]: I0606 09:39:13.961957 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a441c0d9-153a-4447-95c8-d1749aa689b6" path="/var/lib/kubelet/pods/a441c0d9-153a-4447-95c8-d1749aa689b6/volumes" Jun 06 09:39:14 crc kubenswrapper[4911]: I0606 09:39:14.026316 4911 generic.go:334] "Generic (PLEG): container finished" podID="a441c0d9-153a-4447-95c8-d1749aa689b6" containerID="619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506" exitCode=0 Jun 06 09:39:14 crc kubenswrapper[4911]: I0606 09:39:14.026382 4911 scope.go:117] "RemoveContainer" containerID="619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506" Jun 06 09:39:14 crc kubenswrapper[4911]: I0606 09:39:14.026463 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-ztxxg" Jun 06 09:39:14 crc kubenswrapper[4911]: I0606 09:39:14.051458 4911 scope.go:117] "RemoveContainer" containerID="619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506" Jun 06 09:39:14 crc kubenswrapper[4911]: E0606 09:39:14.051975 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506\": container with ID starting with 619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506 not found: ID does not exist" containerID="619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506" Jun 06 09:39:14 crc kubenswrapper[4911]: I0606 09:39:14.052025 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506"} err="failed to get container status \"619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506\": rpc error: code = NotFound desc = could not find container \"619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506\": container with ID starting with 619293f98018079f731c7fe53d2fbc95302ec0de0d6ffd53d2815686f0139506 not found: ID does not exist" Jun 06 09:39:15 crc kubenswrapper[4911]: I0606 09:39:15.955411 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:39:15 crc kubenswrapper[4911]: E0606 09:39:15.955868 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:39:18 crc kubenswrapper[4911]: I0606 09:39:18.027894 4911 scope.go:117] "RemoveContainer" containerID="c7727c01e2fcc2cbcea1cfa164f6f8f40cdb6d4f2f1d356d7ff5d342baa49b54" Jun 06 09:39:18 crc kubenswrapper[4911]: I0606 09:39:18.059615 4911 scope.go:117] "RemoveContainer" containerID="5d1343d141c059a94a5c3e1a7b4989040f764ec3adb17a96f95dc552b836b175" Jun 06 09:39:18 crc kubenswrapper[4911]: I0606 09:39:18.083663 4911 scope.go:117] "RemoveContainer" containerID="073e38b9a7e8987546b532264cde763b88a042bd531364f08f0dfb36f1757b6e" Jun 06 09:39:18 crc kubenswrapper[4911]: I0606 09:39:18.114515 4911 scope.go:117] "RemoveContainer" containerID="3dd4662179adf4c8b70a0f3572c715c3e21b380a3394d8803d67149214cef055" Jun 06 09:39:18 crc kubenswrapper[4911]: I0606 09:39:18.139708 4911 scope.go:117] "RemoveContainer" containerID="74622eae93c5196a3c70307c4d89a71379eae1a53a92bc12e388051584ff9f3b" Jun 06 09:39:26 crc kubenswrapper[4911]: I0606 09:39:26.044676 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-s2nfn"] Jun 06 09:39:26 crc kubenswrapper[4911]: I0606 09:39:26.053834 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-s2nfn"] Jun 06 09:39:27 crc kubenswrapper[4911]: I0606 09:39:27.948247 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:39:27 crc kubenswrapper[4911]: E0606 09:39:27.948840 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:39:27 crc kubenswrapper[4911]: I0606 09:39:27.963686 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36ecd1b9-f118-413b-9e59-0c6d2f389c44" path="/var/lib/kubelet/pods/36ecd1b9-f118-413b-9e59-0c6d2f389c44/volumes" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.001420 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-clqk2"] Jun 06 09:39:28 crc kubenswrapper[4911]: E0606 09:39:28.002769 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a441c0d9-153a-4447-95c8-d1749aa689b6" containerName="container-00" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.002936 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a441c0d9-153a-4447-95c8-d1749aa689b6" containerName="container-00" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.003421 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a441c0d9-153a-4447-95c8-d1749aa689b6" containerName="container-00" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.008492 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.011633 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-clqk2"] Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.037441 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-j6gv7"] Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.051230 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-j6gv7"] Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.141321 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-catalog-content\") pod \"community-operators-clqk2\" (UID: \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.142272 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-utilities\") pod \"community-operators-clqk2\" (UID: \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.142482 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmv5d\" (UniqueName: \"kubernetes.io/projected/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-kube-api-access-nmv5d\") pod \"community-operators-clqk2\" (UID: \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.245148 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-utilities\") pod \"community-operators-clqk2\" (UID: 
\"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.245279 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmv5d\" (UniqueName: \"kubernetes.io/projected/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-kube-api-access-nmv5d\") pod \"community-operators-clqk2\" (UID: \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.245462 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-catalog-content\") pod \"community-operators-clqk2\" (UID: \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.245692 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-utilities\") pod \"community-operators-clqk2\" (UID: \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.246027 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-catalog-content\") pod \"community-operators-clqk2\" (UID: \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.275385 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmv5d\" (UniqueName: \"kubernetes.io/projected/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-kube-api-access-nmv5d\") pod \"community-operators-clqk2\" (UID: \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.338763 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:28 crc kubenswrapper[4911]: I0606 09:39:28.841308 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-clqk2"] Jun 06 09:39:29 crc kubenswrapper[4911]: I0606 09:39:29.031207 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-vpfq6"] Jun 06 09:39:29 crc kubenswrapper[4911]: I0606 09:39:29.041020 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-vpfq6"] Jun 06 09:39:29 crc kubenswrapper[4911]: I0606 09:39:29.177797 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-clqk2" event={"ID":"6bf69688-d2e8-4fa7-aae0-4c630fc6234b","Type":"ContainerStarted","Data":"b8ff18b6b27f2116af46a4a68d86886c616e9d4494e563011f1fcac75456b81b"} Jun 06 09:39:29 crc kubenswrapper[4911]: I0606 09:39:29.960208 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="779f53c2-a052-43d1-8d17-09b3515e1812" path="/var/lib/kubelet/pods/779f53c2-a052-43d1-8d17-09b3515e1812/volumes" Jun 06 09:39:29 crc kubenswrapper[4911]: I0606 09:39:29.961658 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92b2b6d0-40dc-43a8-bcc1-6094912af4d2" path="/var/lib/kubelet/pods/92b2b6d0-40dc-43a8-bcc1-6094912af4d2/volumes" Jun 06 09:39:30 crc kubenswrapper[4911]: I0606 09:39:30.187904 4911 generic.go:334] "Generic (PLEG): container finished" podID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" containerID="f1405b47528c43db845f42761c2120e0914ec91ce52d99290e4794f09be95221" exitCode=0 Jun 06 09:39:30 crc kubenswrapper[4911]: I0606 09:39:30.187943 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-clqk2" event={"ID":"6bf69688-d2e8-4fa7-aae0-4c630fc6234b","Type":"ContainerDied","Data":"f1405b47528c43db845f42761c2120e0914ec91ce52d99290e4794f09be95221"} Jun 06 09:39:30 crc kubenswrapper[4911]: I0606 09:39:30.190409 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 09:39:34 crc kubenswrapper[4911]: I0606 09:39:34.223413 4911 generic.go:334] "Generic (PLEG): container finished" podID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" containerID="571fe5570aa8abfb791f5f2aaa695f77b8713a8be863db4fa0c834c9515f6b8c" exitCode=0 Jun 06 09:39:34 crc kubenswrapper[4911]: I0606 09:39:34.223456 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-clqk2" event={"ID":"6bf69688-d2e8-4fa7-aae0-4c630fc6234b","Type":"ContainerDied","Data":"571fe5570aa8abfb791f5f2aaa695f77b8713a8be863db4fa0c834c9515f6b8c"} Jun 06 09:39:35 crc kubenswrapper[4911]: I0606 09:39:35.234459 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-clqk2" event={"ID":"6bf69688-d2e8-4fa7-aae0-4c630fc6234b","Type":"ContainerStarted","Data":"52149ebc1aba09d2aaf6aa6462a867b040641303026c67f58db2d53cc5b7d51c"} Jun 06 09:39:35 crc kubenswrapper[4911]: I0606 09:39:35.257159 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-clqk2" podStartSLOduration=3.658412552 podStartE2EDuration="8.257137097s" podCreationTimestamp="2025-06-06 09:39:27 +0000 UTC" firstStartedPulling="2025-06-06 09:39:30.190173661 +0000 UTC m=+1581.465599204" lastFinishedPulling="2025-06-06 09:39:34.788898206 +0000 UTC m=+1586.064323749" observedRunningTime="2025-06-06 
09:39:35.250971069 +0000 UTC m=+1586.526396612" watchObservedRunningTime="2025-06-06 09:39:35.257137097 +0000 UTC m=+1586.532562640" Jun 06 09:39:38 crc kubenswrapper[4911]: I0606 09:39:38.037483 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-6042-account-create-ttp6q"] Jun 06 09:39:38 crc kubenswrapper[4911]: I0606 09:39:38.046197 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-6042-account-create-ttp6q"] Jun 06 09:39:38 crc kubenswrapper[4911]: I0606 09:39:38.339706 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:38 crc kubenswrapper[4911]: I0606 09:39:38.340394 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:38 crc kubenswrapper[4911]: I0606 09:39:38.387141 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:39 crc kubenswrapper[4911]: I0606 09:39:39.034676 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-b189-account-create-hwbdc"] Jun 06 09:39:39 crc kubenswrapper[4911]: I0606 09:39:39.047757 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-b189-account-create-hwbdc"] Jun 06 09:39:39 crc kubenswrapper[4911]: I0606 09:39:39.958236 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16544562-77d9-4360-856e-5e70c369b7bb" path="/var/lib/kubelet/pods/16544562-77d9-4360-856e-5e70c369b7bb/volumes" Jun 06 09:39:39 crc kubenswrapper[4911]: I0606 09:39:39.959325 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="753820ef-863f-428c-bfc7-5dc548a56eb1" path="/var/lib/kubelet/pods/753820ef-863f-428c-bfc7-5dc548a56eb1/volumes" Jun 06 09:39:40 crc kubenswrapper[4911]: I0606 09:39:40.331788 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-clqk2" Jun 06 09:39:40 crc kubenswrapper[4911]: I0606 09:39:40.409672 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-clqk2"] Jun 06 09:39:40 crc kubenswrapper[4911]: I0606 09:39:40.457759 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rwvgf"] Jun 06 09:39:40 crc kubenswrapper[4911]: I0606 09:39:40.458057 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rwvgf" podUID="dfa00874-57d4-44e2-a6e0-50c389978f21" containerName="registry-server" containerID="cri-o://68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19" gracePeriod=2 Jun 06 09:39:40 crc kubenswrapper[4911]: I0606 09:39:40.947552 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:39:40 crc kubenswrapper[4911]: E0606 09:39:40.948216 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:39:40 crc kubenswrapper[4911]: I0606 09:39:40.991986 4911 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.117418 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwcht\" (UniqueName: \"kubernetes.io/projected/dfa00874-57d4-44e2-a6e0-50c389978f21-kube-api-access-qwcht\") pod \"dfa00874-57d4-44e2-a6e0-50c389978f21\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.117689 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-utilities\") pod \"dfa00874-57d4-44e2-a6e0-50c389978f21\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.117758 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-catalog-content\") pod \"dfa00874-57d4-44e2-a6e0-50c389978f21\" (UID: \"dfa00874-57d4-44e2-a6e0-50c389978f21\") " Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.119014 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-utilities" (OuterVolumeSpecName: "utilities") pod "dfa00874-57d4-44e2-a6e0-50c389978f21" (UID: "dfa00874-57d4-44e2-a6e0-50c389978f21"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.124126 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfa00874-57d4-44e2-a6e0-50c389978f21-kube-api-access-qwcht" (OuterVolumeSpecName: "kube-api-access-qwcht") pod "dfa00874-57d4-44e2-a6e0-50c389978f21" (UID: "dfa00874-57d4-44e2-a6e0-50c389978f21"). InnerVolumeSpecName "kube-api-access-qwcht". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.160031 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dfa00874-57d4-44e2-a6e0-50c389978f21" (UID: "dfa00874-57d4-44e2-a6e0-50c389978f21"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.221414 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwcht\" (UniqueName: \"kubernetes.io/projected/dfa00874-57d4-44e2-a6e0-50c389978f21-kube-api-access-qwcht\") on node \"crc\" DevicePath \"\"" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.221457 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.221467 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfa00874-57d4-44e2-a6e0-50c389978f21-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.295728 4911 generic.go:334] "Generic (PLEG): container finished" podID="dfa00874-57d4-44e2-a6e0-50c389978f21" containerID="68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19" exitCode=0 Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.295809 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rwvgf" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.295798 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rwvgf" event={"ID":"dfa00874-57d4-44e2-a6e0-50c389978f21","Type":"ContainerDied","Data":"68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19"} Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.295901 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rwvgf" event={"ID":"dfa00874-57d4-44e2-a6e0-50c389978f21","Type":"ContainerDied","Data":"d63bd79b3a99cb471cce708be74f7089d01400d6609f269fea62fe457974ef5b"} Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.295939 4911 scope.go:117] "RemoveContainer" containerID="68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.331748 4911 scope.go:117] "RemoveContainer" containerID="a9d0082b9c3f915b8ee997b3c34f34d17e89694d88cc9ad3314d8e8aad598393" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.332500 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rwvgf"] Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.341997 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rwvgf"] Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.362845 4911 scope.go:117] "RemoveContainer" containerID="49b9f5c2f126302a844891a6bfc533d55f8ff6cf0a8bae8102ce08926c567f70" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.389721 4911 scope.go:117] "RemoveContainer" containerID="68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19" Jun 06 09:39:41 crc kubenswrapper[4911]: E0606 09:39:41.390168 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19\": container with ID starting with 68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19 not found: ID does not exist" containerID="68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.390202 
4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19"} err="failed to get container status \"68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19\": rpc error: code = NotFound desc = could not find container \"68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19\": container with ID starting with 68852764d4952cfeadc2125fb3d5acc0b85defedd462e5b1957ac096df043c19 not found: ID does not exist" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.390220 4911 scope.go:117] "RemoveContainer" containerID="a9d0082b9c3f915b8ee997b3c34f34d17e89694d88cc9ad3314d8e8aad598393" Jun 06 09:39:41 crc kubenswrapper[4911]: E0606 09:39:41.390515 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9d0082b9c3f915b8ee997b3c34f34d17e89694d88cc9ad3314d8e8aad598393\": container with ID starting with a9d0082b9c3f915b8ee997b3c34f34d17e89694d88cc9ad3314d8e8aad598393 not found: ID does not exist" containerID="a9d0082b9c3f915b8ee997b3c34f34d17e89694d88cc9ad3314d8e8aad598393" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.390559 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9d0082b9c3f915b8ee997b3c34f34d17e89694d88cc9ad3314d8e8aad598393"} err="failed to get container status \"a9d0082b9c3f915b8ee997b3c34f34d17e89694d88cc9ad3314d8e8aad598393\": rpc error: code = NotFound desc = could not find container \"a9d0082b9c3f915b8ee997b3c34f34d17e89694d88cc9ad3314d8e8aad598393\": container with ID starting with a9d0082b9c3f915b8ee997b3c34f34d17e89694d88cc9ad3314d8e8aad598393 not found: ID does not exist" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.390594 4911 scope.go:117] "RemoveContainer" containerID="49b9f5c2f126302a844891a6bfc533d55f8ff6cf0a8bae8102ce08926c567f70" Jun 06 09:39:41 crc kubenswrapper[4911]: E0606 09:39:41.390935 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49b9f5c2f126302a844891a6bfc533d55f8ff6cf0a8bae8102ce08926c567f70\": container with ID starting with 49b9f5c2f126302a844891a6bfc533d55f8ff6cf0a8bae8102ce08926c567f70 not found: ID does not exist" containerID="49b9f5c2f126302a844891a6bfc533d55f8ff6cf0a8bae8102ce08926c567f70" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.390972 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49b9f5c2f126302a844891a6bfc533d55f8ff6cf0a8bae8102ce08926c567f70"} err="failed to get container status \"49b9f5c2f126302a844891a6bfc533d55f8ff6cf0a8bae8102ce08926c567f70\": rpc error: code = NotFound desc = could not find container \"49b9f5c2f126302a844891a6bfc533d55f8ff6cf0a8bae8102ce08926c567f70\": container with ID starting with 49b9f5c2f126302a844891a6bfc533d55f8ff6cf0a8bae8102ce08926c567f70 not found: ID does not exist" Jun 06 09:39:41 crc kubenswrapper[4911]: I0606 09:39:41.959068 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfa00874-57d4-44e2-a6e0-50c389978f21" path="/var/lib/kubelet/pods/dfa00874-57d4-44e2-a6e0-50c389978f21/volumes" Jun 06 09:39:44 crc kubenswrapper[4911]: I0606 09:39:44.028423 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-8066-account-create-94rck"] Jun 06 09:39:44 crc kubenswrapper[4911]: I0606 09:39:44.036525 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/glance-8066-account-create-94rck"] Jun 06 09:39:45 crc kubenswrapper[4911]: I0606 09:39:45.960629 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d268c6a-6cc1-4e52-8893-ed48861ea882" path="/var/lib/kubelet/pods/1d268c6a-6cc1-4e52-8893-ed48861ea882/volumes" Jun 06 09:39:47 crc kubenswrapper[4911]: I0606 09:39:47.350994 4911 generic.go:334] "Generic (PLEG): container finished" podID="a4c28cde-8e9e-469d-9960-ea174038d9ef" containerID="51697baf9d3e2defa06249ed88ef7f15193abe0e373420c8d9a3e8c10b088b44" exitCode=0 Jun 06 09:39:47 crc kubenswrapper[4911]: I0606 09:39:47.351085 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" event={"ID":"a4c28cde-8e9e-469d-9960-ea174038d9ef","Type":"ContainerDied","Data":"51697baf9d3e2defa06249ed88ef7f15193abe0e373420c8d9a3e8c10b088b44"} Jun 06 09:39:48 crc kubenswrapper[4911]: I0606 09:39:48.996853 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.004733 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-inventory\") pod \"a4c28cde-8e9e-469d-9960-ea174038d9ef\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.004775 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-bootstrap-combined-ca-bundle\") pod \"a4c28cde-8e9e-469d-9960-ea174038d9ef\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.010237 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "a4c28cde-8e9e-469d-9960-ea174038d9ef" (UID: "a4c28cde-8e9e-469d-9960-ea174038d9ef"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.037403 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-inventory" (OuterVolumeSpecName: "inventory") pod "a4c28cde-8e9e-469d-9960-ea174038d9ef" (UID: "a4c28cde-8e9e-469d-9960-ea174038d9ef"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.106581 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55cf5\" (UniqueName: \"kubernetes.io/projected/a4c28cde-8e9e-469d-9960-ea174038d9ef-kube-api-access-55cf5\") pod \"a4c28cde-8e9e-469d-9960-ea174038d9ef\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.106638 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-ssh-key\") pod \"a4c28cde-8e9e-469d-9960-ea174038d9ef\" (UID: \"a4c28cde-8e9e-469d-9960-ea174038d9ef\") " Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.107271 4911 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.107299 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.111385 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4c28cde-8e9e-469d-9960-ea174038d9ef-kube-api-access-55cf5" (OuterVolumeSpecName: "kube-api-access-55cf5") pod "a4c28cde-8e9e-469d-9960-ea174038d9ef" (UID: "a4c28cde-8e9e-469d-9960-ea174038d9ef"). InnerVolumeSpecName "kube-api-access-55cf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.133195 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a4c28cde-8e9e-469d-9960-ea174038d9ef" (UID: "a4c28cde-8e9e-469d-9960-ea174038d9ef"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.209954 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4c28cde-8e9e-469d-9960-ea174038d9ef-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.209998 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55cf5\" (UniqueName: \"kubernetes.io/projected/a4c28cde-8e9e-469d-9960-ea174038d9ef-kube-api-access-55cf5\") on node \"crc\" DevicePath \"\"" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.378730 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" event={"ID":"a4c28cde-8e9e-469d-9960-ea174038d9ef","Type":"ContainerDied","Data":"efa1069adc984951f69a04dbcba62d67d02641496b532733a2a1dcb0512b979d"} Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.378784 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efa1069adc984951f69a04dbcba62d67d02641496b532733a2a1dcb0512b979d" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.378765 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-78t24" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.460788 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq"] Jun 06 09:39:49 crc kubenswrapper[4911]: E0606 09:39:49.461342 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfa00874-57d4-44e2-a6e0-50c389978f21" containerName="extract-utilities" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.461366 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfa00874-57d4-44e2-a6e0-50c389978f21" containerName="extract-utilities" Jun 06 09:39:49 crc kubenswrapper[4911]: E0606 09:39:49.461386 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4c28cde-8e9e-469d-9960-ea174038d9ef" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.461394 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4c28cde-8e9e-469d-9960-ea174038d9ef" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jun 06 09:39:49 crc kubenswrapper[4911]: E0606 09:39:49.461433 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfa00874-57d4-44e2-a6e0-50c389978f21" containerName="registry-server" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.461442 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfa00874-57d4-44e2-a6e0-50c389978f21" containerName="registry-server" Jun 06 09:39:49 crc kubenswrapper[4911]: E0606 09:39:49.461464 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfa00874-57d4-44e2-a6e0-50c389978f21" containerName="extract-content" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.461472 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfa00874-57d4-44e2-a6e0-50c389978f21" containerName="extract-content" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.461726 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4c28cde-8e9e-469d-9960-ea174038d9ef" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.461754 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfa00874-57d4-44e2-a6e0-50c389978f21" containerName="registry-server" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.462640 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.465210 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.465306 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.465435 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.465449 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.472068 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq"] Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.515258 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.515349 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsxq7\" (UniqueName: \"kubernetes.io/projected/aac6d9d8-4525-494e-8866-215dba01a06c-kube-api-access-hsxq7\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.515441 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.617453 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.617520 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsxq7\" (UniqueName: \"kubernetes.io/projected/aac6d9d8-4525-494e-8866-215dba01a06c-kube-api-access-hsxq7\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.617584 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-ssh-key\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.621673 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.622314 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.637595 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsxq7\" (UniqueName: \"kubernetes.io/projected/aac6d9d8-4525-494e-8866-215dba01a06c-kube-api-access-hsxq7\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:39:49 crc kubenswrapper[4911]: I0606 09:39:49.780728 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:39:50 crc kubenswrapper[4911]: I0606 09:39:50.296378 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq"] Jun 06 09:39:50 crc kubenswrapper[4911]: I0606 09:39:50.390525 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" event={"ID":"aac6d9d8-4525-494e-8866-215dba01a06c","Type":"ContainerStarted","Data":"4604fcc3c2d1a927889a782005bf58faded17121faa96b9fa2786a496585fcb1"} Jun 06 09:39:51 crc kubenswrapper[4911]: I0606 09:39:51.401923 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" event={"ID":"aac6d9d8-4525-494e-8866-215dba01a06c","Type":"ContainerStarted","Data":"715086899d9465e62ff93e47818abfe69b9473ad39f51808ead7466fe33be871"} Jun 06 09:39:51 crc kubenswrapper[4911]: I0606 09:39:51.426776 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" podStartSLOduration=1.971552833 podStartE2EDuration="2.426758259s" podCreationTimestamp="2025-06-06 09:39:49 +0000 UTC" firstStartedPulling="2025-06-06 09:39:50.306484021 +0000 UTC m=+1601.581909564" lastFinishedPulling="2025-06-06 09:39:50.761689447 +0000 UTC m=+1602.037114990" observedRunningTime="2025-06-06 09:39:51.421421952 +0000 UTC m=+1602.696847515" watchObservedRunningTime="2025-06-06 09:39:51.426758259 +0000 UTC m=+1602.702183802" Jun 06 09:39:54 crc kubenswrapper[4911]: I0606 09:39:54.948637 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:39:54 crc kubenswrapper[4911]: E0606 09:39:54.949464 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.056836 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-mvtw8"] Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.066474 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-xg7d9"] Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.075862 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-x87xj"] Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.084838 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-qt79z"] Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.093019 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-mvtw8"] Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.101504 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-x87xj"] Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.109416 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-xg7d9"] Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.116809 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-qt79z"] Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.463866 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-tfz5b"] Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.465732 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-tfz5b" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.505299 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-host\") pod \"crc-debug-tfz5b\" (UID: \"f5891fae-3baf-4bfe-a8aa-157c7cd2efff\") " pod="openstack/crc-debug-tfz5b" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.505568 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lqfq\" (UniqueName: \"kubernetes.io/projected/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-kube-api-access-9lqfq\") pod \"crc-debug-tfz5b\" (UID: \"f5891fae-3baf-4bfe-a8aa-157c7cd2efff\") " pod="openstack/crc-debug-tfz5b" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.608279 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lqfq\" (UniqueName: \"kubernetes.io/projected/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-kube-api-access-9lqfq\") pod \"crc-debug-tfz5b\" (UID: \"f5891fae-3baf-4bfe-a8aa-157c7cd2efff\") " pod="openstack/crc-debug-tfz5b" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.609286 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-host\") pod \"crc-debug-tfz5b\" (UID: \"f5891fae-3baf-4bfe-a8aa-157c7cd2efff\") " pod="openstack/crc-debug-tfz5b" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.609475 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-host\") pod \"crc-debug-tfz5b\" (UID: \"f5891fae-3baf-4bfe-a8aa-157c7cd2efff\") " pod="openstack/crc-debug-tfz5b" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.630652 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lqfq\" (UniqueName: \"kubernetes.io/projected/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-kube-api-access-9lqfq\") pod \"crc-debug-tfz5b\" (UID: \"f5891fae-3baf-4bfe-a8aa-157c7cd2efff\") " pod="openstack/crc-debug-tfz5b" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.792706 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-tfz5b" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.961811 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30ec5b63-8433-471e-8a86-d21652dd0367" path="/var/lib/kubelet/pods/30ec5b63-8433-471e-8a86-d21652dd0367/volumes" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.962871 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7cb25b1-db79-44b5-832b-bdeaf22c1c93" path="/var/lib/kubelet/pods/d7cb25b1-db79-44b5-832b-bdeaf22c1c93/volumes" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.963572 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddb2c431-3ba8-4973-935f-a1922bf220e7" path="/var/lib/kubelet/pods/ddb2c431-3ba8-4973-935f-a1922bf220e7/volumes" Jun 06 09:40:01 crc kubenswrapper[4911]: I0606 09:40:01.965034 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddbdc914-9643-4c5f-ae96-c519a85f1088" path="/var/lib/kubelet/pods/ddbdc914-9643-4c5f-ae96-c519a85f1088/volumes" Jun 06 09:40:02 crc kubenswrapper[4911]: I0606 09:40:02.529443 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-tfz5b" event={"ID":"f5891fae-3baf-4bfe-a8aa-157c7cd2efff","Type":"ContainerStarted","Data":"c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51"} Jun 06 09:40:02 crc kubenswrapper[4911]: I0606 09:40:02.530066 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-tfz5b" event={"ID":"f5891fae-3baf-4bfe-a8aa-157c7cd2efff","Type":"ContainerStarted","Data":"bcc30bf755c5f40a0bb95fab54e27cb5a232a11083de2c9a063e358fbac1c039"} Jun 06 09:40:02 crc kubenswrapper[4911]: I0606 09:40:02.545705 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-tfz5b" podStartSLOduration=1.5456780430000001 podStartE2EDuration="1.545678043s" podCreationTimestamp="2025-06-06 09:40:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:40:02.545489778 +0000 UTC m=+1613.820915331" watchObservedRunningTime="2025-06-06 09:40:02.545678043 +0000 UTC m=+1613.821103586" Jun 06 09:40:06 crc kubenswrapper[4911]: I0606 09:40:06.948820 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:40:06 crc kubenswrapper[4911]: E0606 09:40:06.949670 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:40:07 crc kubenswrapper[4911]: I0606 09:40:07.029410 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-9bdfb"] Jun 06 09:40:07 crc kubenswrapper[4911]: I0606 09:40:07.038631 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-9bdfb"] Jun 06 09:40:07 crc kubenswrapper[4911]: I0606 09:40:07.973201 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0dd5a73-4e05-440f-90e7-f432562f3c3d" path="/var/lib/kubelet/pods/c0dd5a73-4e05-440f-90e7-f432562f3c3d/volumes" Jun 06 09:40:11 crc kubenswrapper[4911]: I0606 09:40:11.037769 4911 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/keystone-db-sync-scb57"] Jun 06 09:40:11 crc kubenswrapper[4911]: I0606 09:40:11.049228 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-scb57"] Jun 06 09:40:11 crc kubenswrapper[4911]: I0606 09:40:11.961824 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273" path="/var/lib/kubelet/pods/2fe12fd5-efa0-47eb-b71d-6f1d3b4fb273/volumes" Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.443568 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-tfz5b"] Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.444197 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-tfz5b" podUID="f5891fae-3baf-4bfe-a8aa-157c7cd2efff" containerName="container-00" containerID="cri-o://c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51" gracePeriod=2 Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.457107 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-tfz5b"] Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.548431 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-tfz5b" Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.624195 4911 generic.go:334] "Generic (PLEG): container finished" podID="f5891fae-3baf-4bfe-a8aa-157c7cd2efff" containerID="c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51" exitCode=0 Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.624316 4911 scope.go:117] "RemoveContainer" containerID="c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51" Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.624322 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-tfz5b" Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.645565 4911 scope.go:117] "RemoveContainer" containerID="c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51" Jun 06 09:40:12 crc kubenswrapper[4911]: E0606 09:40:12.645990 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51\": container with ID starting with c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51 not found: ID does not exist" containerID="c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51" Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.646031 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51"} err="failed to get container status \"c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51\": rpc error: code = NotFound desc = could not find container \"c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51\": container with ID starting with c7255b4205e68b14c2dd49fd9d7a0cd316ead32a405103854542e7dd818eac51 not found: ID does not exist" Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.653083 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lqfq\" (UniqueName: \"kubernetes.io/projected/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-kube-api-access-9lqfq\") pod \"f5891fae-3baf-4bfe-a8aa-157c7cd2efff\" (UID: \"f5891fae-3baf-4bfe-a8aa-157c7cd2efff\") " Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.653430 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-host\") pod \"f5891fae-3baf-4bfe-a8aa-157c7cd2efff\" (UID: \"f5891fae-3baf-4bfe-a8aa-157c7cd2efff\") " Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.653731 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-host" (OuterVolumeSpecName: "host") pod "f5891fae-3baf-4bfe-a8aa-157c7cd2efff" (UID: "f5891fae-3baf-4bfe-a8aa-157c7cd2efff"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.654347 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.659424 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-kube-api-access-9lqfq" (OuterVolumeSpecName: "kube-api-access-9lqfq") pod "f5891fae-3baf-4bfe-a8aa-157c7cd2efff" (UID: "f5891fae-3baf-4bfe-a8aa-157c7cd2efff"). InnerVolumeSpecName "kube-api-access-9lqfq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:40:12 crc kubenswrapper[4911]: I0606 09:40:12.756642 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lqfq\" (UniqueName: \"kubernetes.io/projected/f5891fae-3baf-4bfe-a8aa-157c7cd2efff-kube-api-access-9lqfq\") on node \"crc\" DevicePath \"\"" Jun 06 09:40:13 crc kubenswrapper[4911]: I0606 09:40:13.960151 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5891fae-3baf-4bfe-a8aa-157c7cd2efff" path="/var/lib/kubelet/pods/f5891fae-3baf-4bfe-a8aa-157c7cd2efff/volumes" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.034411 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-11ea-account-create-k549j"] Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.045803 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-11ea-account-create-k549j"] Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.237209 4911 scope.go:117] "RemoveContainer" containerID="717f10e465cc9993058fdaaf3f0b6b81c900abe00cccb4fc71ad70340b05f9b9" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.263717 4911 scope.go:117] "RemoveContainer" containerID="c899ee871e6fae7e8d4689a0585665d2682d26b4415b4fd262d06a4d5bdf2101" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.315397 4911 scope.go:117] "RemoveContainer" containerID="1ae8b3b8370df73982d9226b37541aeeb89e407c696e939229bd306f6ae69751" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.391867 4911 scope.go:117] "RemoveContainer" containerID="d7142c835952404819d9d029db1b60e1da9014272f881569004cde8c2756f1a1" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.443803 4911 scope.go:117] "RemoveContainer" containerID="9b232eb244da3a38b119a4103d84dc2674cbb0148a18d069e7c4581e7289f8f0" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.481839 4911 scope.go:117] "RemoveContainer" containerID="48d8f5e9fd0733a4d38dc6298d856923ae7216d3e74eca49deee698133510592" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.525713 4911 scope.go:117] "RemoveContainer" containerID="b97422f12350163894fa2e3957699c6848e7a4d72d5c02f1ae7bb75f07e389fa" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.545742 4911 scope.go:117] "RemoveContainer" containerID="70ce0764fab7b3d425b2f2fbdcbc903d1ff831e7676cdc42946d25337c34bf72" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.598718 4911 scope.go:117] "RemoveContainer" containerID="dd9b3829522c91f7fe7428979e2e9e6c098817da4247700eb6574d202262d518" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.619712 4911 scope.go:117] "RemoveContainer" containerID="376ec9256b79fdda7406a82326a552ca9fbf73d28a69d28337ad4daddd28bbc6" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.639631 4911 scope.go:117] "RemoveContainer" containerID="856ec54ea2059eebbe914f198d485a2b18fa9ddb8ea24febdd8e7ed08cb80f4a" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.665182 4911 scope.go:117] "RemoveContainer" containerID="a86100116424caef4dcb237c217862df192b1fefa559f0b0768567ad1cf59bda" Jun 06 09:40:18 crc kubenswrapper[4911]: I0606 09:40:18.691674 4911 scope.go:117] "RemoveContainer" containerID="4b03684044688034919468ba18eabe35c026e00740058601862a5b57bded9866" Jun 06 09:40:19 crc kubenswrapper[4911]: I0606 09:40:19.960256 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:40:19 crc kubenswrapper[4911]: E0606 09:40:19.960629 4911 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:40:19 crc kubenswrapper[4911]: I0606 09:40:19.960746 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="368d0127-18f0-4227-b93f-67c3df9d17af" path="/var/lib/kubelet/pods/368d0127-18f0-4227-b93f-67c3df9d17af/volumes" Jun 06 09:40:20 crc kubenswrapper[4911]: I0606 09:40:20.027232 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-0561-account-create-6k25c"] Jun 06 09:40:20 crc kubenswrapper[4911]: I0606 09:40:20.037083 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5ed0-account-create-kt7pc"] Jun 06 09:40:20 crc kubenswrapper[4911]: I0606 09:40:20.045218 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5ed0-account-create-kt7pc"] Jun 06 09:40:20 crc kubenswrapper[4911]: I0606 09:40:20.053681 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-0561-account-create-6k25c"] Jun 06 09:40:21 crc kubenswrapper[4911]: I0606 09:40:21.029359 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-0587-account-create-5ljgg"] Jun 06 09:40:21 crc kubenswrapper[4911]: I0606 09:40:21.039248 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-0587-account-create-5ljgg"] Jun 06 09:40:21 crc kubenswrapper[4911]: I0606 09:40:21.959058 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91e36afb-76a0-41d2-85d2-716055b255a6" path="/var/lib/kubelet/pods/91e36afb-76a0-41d2-85d2-716055b255a6/volumes" Jun 06 09:40:21 crc kubenswrapper[4911]: I0606 09:40:21.959771 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e" path="/var/lib/kubelet/pods/9c79f7b4-86b0-446c-a05e-30ef9ae2cb3e/volumes" Jun 06 09:40:21 crc kubenswrapper[4911]: I0606 09:40:21.960317 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dd0a8e8-2790-448d-85ed-753fe1389bab" path="/var/lib/kubelet/pods/9dd0a8e8-2790-448d-85ed-753fe1389bab/volumes" Jun 06 09:40:24 crc kubenswrapper[4911]: I0606 09:40:24.033412 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-grtxs"] Jun 06 09:40:24 crc kubenswrapper[4911]: I0606 09:40:24.045973 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-grtxs"] Jun 06 09:40:25 crc kubenswrapper[4911]: I0606 09:40:25.960705 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099" path="/var/lib/kubelet/pods/8c1e1a64-5f16-4d9d-b3cb-5d3bf0e6c099/volumes" Jun 06 09:40:31 crc kubenswrapper[4911]: I0606 09:40:31.062173 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-cgpnm"] Jun 06 09:40:31 crc kubenswrapper[4911]: I0606 09:40:31.074429 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-cgpnm"] Jun 06 09:40:31 crc kubenswrapper[4911]: I0606 09:40:31.958951 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d576ab7b-1286-4a6f-b43a-183187d822c7" path="/var/lib/kubelet/pods/d576ab7b-1286-4a6f-b43a-183187d822c7/volumes" Jun 06 09:40:34 crc 
kubenswrapper[4911]: I0606 09:40:34.947435 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:40:34 crc kubenswrapper[4911]: E0606 09:40:34.948023 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:40:45 crc kubenswrapper[4911]: I0606 09:40:45.954203 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:40:45 crc kubenswrapper[4911]: E0606 09:40:45.955126 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:40:54 crc kubenswrapper[4911]: I0606 09:40:54.032336 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-4hdj9"] Jun 06 09:40:54 crc kubenswrapper[4911]: I0606 09:40:54.042355 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-4hdj9"] Jun 06 09:40:55 crc kubenswrapper[4911]: I0606 09:40:55.964293 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="952f6db6-b318-4bd1-8052-653fa6484d05" path="/var/lib/kubelet/pods/952f6db6-b318-4bd1-8052-653fa6484d05/volumes" Jun 06 09:40:57 crc kubenswrapper[4911]: I0606 09:40:57.949084 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:40:57 crc kubenswrapper[4911]: E0606 09:40:57.949972 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:41:01 crc kubenswrapper[4911]: I0606 09:41:01.793083 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-wzwdj"] Jun 06 09:41:01 crc kubenswrapper[4911]: E0606 09:41:01.794732 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5891fae-3baf-4bfe-a8aa-157c7cd2efff" containerName="container-00" Jun 06 09:41:01 crc kubenswrapper[4911]: I0606 09:41:01.794759 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5891fae-3baf-4bfe-a8aa-157c7cd2efff" containerName="container-00" Jun 06 09:41:01 crc kubenswrapper[4911]: I0606 09:41:01.795406 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5891fae-3baf-4bfe-a8aa-157c7cd2efff" containerName="container-00" Jun 06 09:41:01 crc kubenswrapper[4911]: I0606 09:41:01.796967 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-wzwdj" Jun 06 09:41:01 crc kubenswrapper[4911]: I0606 09:41:01.932002 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75t56\" (UniqueName: \"kubernetes.io/projected/669789eb-14ee-4c21-8830-0a90759431f0-kube-api-access-75t56\") pod \"crc-debug-wzwdj\" (UID: \"669789eb-14ee-4c21-8830-0a90759431f0\") " pod="openstack/crc-debug-wzwdj" Jun 06 09:41:01 crc kubenswrapper[4911]: I0606 09:41:01.932191 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/669789eb-14ee-4c21-8830-0a90759431f0-host\") pod \"crc-debug-wzwdj\" (UID: \"669789eb-14ee-4c21-8830-0a90759431f0\") " pod="openstack/crc-debug-wzwdj" Jun 06 09:41:02 crc kubenswrapper[4911]: I0606 09:41:02.034753 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75t56\" (UniqueName: \"kubernetes.io/projected/669789eb-14ee-4c21-8830-0a90759431f0-kube-api-access-75t56\") pod \"crc-debug-wzwdj\" (UID: \"669789eb-14ee-4c21-8830-0a90759431f0\") " pod="openstack/crc-debug-wzwdj" Jun 06 09:41:02 crc kubenswrapper[4911]: I0606 09:41:02.034841 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/669789eb-14ee-4c21-8830-0a90759431f0-host\") pod \"crc-debug-wzwdj\" (UID: \"669789eb-14ee-4c21-8830-0a90759431f0\") " pod="openstack/crc-debug-wzwdj" Jun 06 09:41:02 crc kubenswrapper[4911]: I0606 09:41:02.034980 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/669789eb-14ee-4c21-8830-0a90759431f0-host\") pod \"crc-debug-wzwdj\" (UID: \"669789eb-14ee-4c21-8830-0a90759431f0\") " pod="openstack/crc-debug-wzwdj" Jun 06 09:41:02 crc kubenswrapper[4911]: I0606 09:41:02.055590 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75t56\" (UniqueName: \"kubernetes.io/projected/669789eb-14ee-4c21-8830-0a90759431f0-kube-api-access-75t56\") pod \"crc-debug-wzwdj\" (UID: \"669789eb-14ee-4c21-8830-0a90759431f0\") " pod="openstack/crc-debug-wzwdj" Jun 06 09:41:02 crc kubenswrapper[4911]: I0606 09:41:02.120753 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-wzwdj" Jun 06 09:41:03 crc kubenswrapper[4911]: I0606 09:41:03.101552 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-wzwdj" event={"ID":"669789eb-14ee-4c21-8830-0a90759431f0","Type":"ContainerStarted","Data":"d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf"} Jun 06 09:41:03 crc kubenswrapper[4911]: I0606 09:41:03.103019 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-wzwdj" event={"ID":"669789eb-14ee-4c21-8830-0a90759431f0","Type":"ContainerStarted","Data":"cdbb9f8e24e8e7718b8e4e05ccd89b8fe49593bdeb455086b924ace89acb86d1"} Jun 06 09:41:03 crc kubenswrapper[4911]: I0606 09:41:03.122117 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-wzwdj" podStartSLOduration=2.122081999 podStartE2EDuration="2.122081999s" podCreationTimestamp="2025-06-06 09:41:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:41:03.1182144 +0000 UTC m=+1674.393639953" watchObservedRunningTime="2025-06-06 09:41:03.122081999 +0000 UTC m=+1674.397507542" Jun 06 09:41:12 crc kubenswrapper[4911]: I0606 09:41:12.672865 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-wzwdj"] Jun 06 09:41:12 crc kubenswrapper[4911]: I0606 09:41:12.674017 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-wzwdj" podUID="669789eb-14ee-4c21-8830-0a90759431f0" containerName="container-00" containerID="cri-o://d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf" gracePeriod=2 Jun 06 09:41:12 crc kubenswrapper[4911]: I0606 09:41:12.682488 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-wzwdj"] Jun 06 09:41:12 crc kubenswrapper[4911]: I0606 09:41:12.770853 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-wzwdj" Jun 06 09:41:12 crc kubenswrapper[4911]: I0606 09:41:12.863133 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75t56\" (UniqueName: \"kubernetes.io/projected/669789eb-14ee-4c21-8830-0a90759431f0-kube-api-access-75t56\") pod \"669789eb-14ee-4c21-8830-0a90759431f0\" (UID: \"669789eb-14ee-4c21-8830-0a90759431f0\") " Jun 06 09:41:12 crc kubenswrapper[4911]: I0606 09:41:12.863279 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/669789eb-14ee-4c21-8830-0a90759431f0-host\") pod \"669789eb-14ee-4c21-8830-0a90759431f0\" (UID: \"669789eb-14ee-4c21-8830-0a90759431f0\") " Jun 06 09:41:12 crc kubenswrapper[4911]: I0606 09:41:12.863383 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/669789eb-14ee-4c21-8830-0a90759431f0-host" (OuterVolumeSpecName: "host") pod "669789eb-14ee-4c21-8830-0a90759431f0" (UID: "669789eb-14ee-4c21-8830-0a90759431f0"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:41:12 crc kubenswrapper[4911]: I0606 09:41:12.863752 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/669789eb-14ee-4c21-8830-0a90759431f0-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:41:12 crc kubenswrapper[4911]: I0606 09:41:12.873557 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/669789eb-14ee-4c21-8830-0a90759431f0-kube-api-access-75t56" (OuterVolumeSpecName: "kube-api-access-75t56") pod "669789eb-14ee-4c21-8830-0a90759431f0" (UID: "669789eb-14ee-4c21-8830-0a90759431f0"). InnerVolumeSpecName "kube-api-access-75t56". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:41:12 crc kubenswrapper[4911]: I0606 09:41:12.948675 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:41:12 crc kubenswrapper[4911]: E0606 09:41:12.948923 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:41:12 crc kubenswrapper[4911]: I0606 09:41:12.964645 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75t56\" (UniqueName: \"kubernetes.io/projected/669789eb-14ee-4c21-8830-0a90759431f0-kube-api-access-75t56\") on node \"crc\" DevicePath \"\"" Jun 06 09:41:13 crc kubenswrapper[4911]: I0606 09:41:13.190640 4911 generic.go:334] "Generic (PLEG): container finished" podID="669789eb-14ee-4c21-8830-0a90759431f0" containerID="d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf" exitCode=0 Jun 06 09:41:13 crc kubenswrapper[4911]: I0606 09:41:13.190703 4911 scope.go:117] "RemoveContainer" containerID="d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf" Jun 06 09:41:13 crc kubenswrapper[4911]: I0606 09:41:13.190754 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-wzwdj" Jun 06 09:41:13 crc kubenswrapper[4911]: I0606 09:41:13.212688 4911 scope.go:117] "RemoveContainer" containerID="d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf" Jun 06 09:41:13 crc kubenswrapper[4911]: E0606 09:41:13.214002 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf\": container with ID starting with d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf not found: ID does not exist" containerID="d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf" Jun 06 09:41:13 crc kubenswrapper[4911]: I0606 09:41:13.214034 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf"} err="failed to get container status \"d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf\": rpc error: code = NotFound desc = could not find container \"d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf\": container with ID starting with d4c516a5b5edb78150bbac9af4b89dae01dd547042f0bc1edd64923c4a6664bf not found: ID does not exist" Jun 06 09:41:13 crc kubenswrapper[4911]: I0606 09:41:13.959057 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="669789eb-14ee-4c21-8830-0a90759431f0" path="/var/lib/kubelet/pods/669789eb-14ee-4c21-8830-0a90759431f0/volumes" Jun 06 09:41:18 crc kubenswrapper[4911]: I0606 09:41:18.944840 4911 scope.go:117] "RemoveContainer" containerID="0cc039d4b3d67ca18a68997568fccacdb377707280cf0ae67b7b7cad903bb767" Jun 06 09:41:18 crc kubenswrapper[4911]: I0606 09:41:18.980413 4911 scope.go:117] "RemoveContainer" containerID="f95599ab53760cfaedb276e449a8553b62c570cde0593fb5ef35148c339bff5c" Jun 06 09:41:19 crc kubenswrapper[4911]: I0606 09:41:19.025053 4911 scope.go:117] "RemoveContainer" containerID="bc76585bc85553e0ac8f84005c4c0def79d4b0b5ad65d19abb7e4d4ca6ea9136" Jun 06 09:41:19 crc kubenswrapper[4911]: I0606 09:41:19.086574 4911 scope.go:117] "RemoveContainer" containerID="854febee88c5fb30b7120f26515beca8487550ded2f5e007328ec31cd97218e4" Jun 06 09:41:19 crc kubenswrapper[4911]: I0606 09:41:19.115375 4911 scope.go:117] "RemoveContainer" containerID="d4b09a0cc37e89240a9330950c83e463d7279fcb473aca0cf411d746afc7a51c" Jun 06 09:41:19 crc kubenswrapper[4911]: I0606 09:41:19.179136 4911 scope.go:117] "RemoveContainer" containerID="34457123d8c5b78aefa3d58c4496b63a36ec290335b890259c6da6cd11c9f1d8" Jun 06 09:41:19 crc kubenswrapper[4911]: I0606 09:41:19.211973 4911 scope.go:117] "RemoveContainer" containerID="1b2c399716ad1cfdc7242e7926cdecd5413c8c62c5920e9e9f67ff4276ff5b95" Jun 06 09:41:19 crc kubenswrapper[4911]: I0606 09:41:19.242476 4911 generic.go:334] "Generic (PLEG): container finished" podID="aac6d9d8-4525-494e-8866-215dba01a06c" containerID="715086899d9465e62ff93e47818abfe69b9473ad39f51808ead7466fe33be871" exitCode=0 Jun 06 09:41:19 crc kubenswrapper[4911]: I0606 09:41:19.242535 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" event={"ID":"aac6d9d8-4525-494e-8866-215dba01a06c","Type":"ContainerDied","Data":"715086899d9465e62ff93e47818abfe69b9473ad39f51808ead7466fe33be871"} Jun 06 09:41:20 crc kubenswrapper[4911]: I0606 09:41:20.689959 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:41:20 crc kubenswrapper[4911]: I0606 09:41:20.826765 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-ssh-key\") pod \"aac6d9d8-4525-494e-8866-215dba01a06c\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " Jun 06 09:41:20 crc kubenswrapper[4911]: I0606 09:41:20.826938 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-inventory\") pod \"aac6d9d8-4525-494e-8866-215dba01a06c\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " Jun 06 09:41:20 crc kubenswrapper[4911]: I0606 09:41:20.826983 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsxq7\" (UniqueName: \"kubernetes.io/projected/aac6d9d8-4525-494e-8866-215dba01a06c-kube-api-access-hsxq7\") pod \"aac6d9d8-4525-494e-8866-215dba01a06c\" (UID: \"aac6d9d8-4525-494e-8866-215dba01a06c\") " Jun 06 09:41:20 crc kubenswrapper[4911]: I0606 09:41:20.833336 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aac6d9d8-4525-494e-8866-215dba01a06c-kube-api-access-hsxq7" (OuterVolumeSpecName: "kube-api-access-hsxq7") pod "aac6d9d8-4525-494e-8866-215dba01a06c" (UID: "aac6d9d8-4525-494e-8866-215dba01a06c"). InnerVolumeSpecName "kube-api-access-hsxq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:41:20 crc kubenswrapper[4911]: I0606 09:41:20.857966 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aac6d9d8-4525-494e-8866-215dba01a06c" (UID: "aac6d9d8-4525-494e-8866-215dba01a06c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:41:20 crc kubenswrapper[4911]: I0606 09:41:20.859712 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-inventory" (OuterVolumeSpecName: "inventory") pod "aac6d9d8-4525-494e-8866-215dba01a06c" (UID: "aac6d9d8-4525-494e-8866-215dba01a06c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:41:20 crc kubenswrapper[4911]: I0606 09:41:20.930959 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:41:20 crc kubenswrapper[4911]: I0606 09:41:20.931015 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsxq7\" (UniqueName: \"kubernetes.io/projected/aac6d9d8-4525-494e-8866-215dba01a06c-kube-api-access-hsxq7\") on node \"crc\" DevicePath \"\"" Jun 06 09:41:20 crc kubenswrapper[4911]: I0606 09:41:20.931035 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aac6d9d8-4525-494e-8866-215dba01a06c-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.268734 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" event={"ID":"aac6d9d8-4525-494e-8866-215dba01a06c","Type":"ContainerDied","Data":"4604fcc3c2d1a927889a782005bf58faded17121faa96b9fa2786a496585fcb1"} Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.268774 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4604fcc3c2d1a927889a782005bf58faded17121faa96b9fa2786a496585fcb1" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.268792 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.334084 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9"] Jun 06 09:41:21 crc kubenswrapper[4911]: E0606 09:41:21.334559 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="669789eb-14ee-4c21-8830-0a90759431f0" containerName="container-00" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.334580 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="669789eb-14ee-4c21-8830-0a90759431f0" containerName="container-00" Jun 06 09:41:21 crc kubenswrapper[4911]: E0606 09:41:21.334602 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aac6d9d8-4525-494e-8866-215dba01a06c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.334611 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="aac6d9d8-4525-494e-8866-215dba01a06c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.334851 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="669789eb-14ee-4c21-8830-0a90759431f0" containerName="container-00" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.334878 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="aac6d9d8-4525-494e-8866-215dba01a06c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.335693 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.339460 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.339643 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.341686 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.341866 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.351430 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9"] Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.442044 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvsst\" (UniqueName: \"kubernetes.io/projected/23c4a939-99a1-4995-9bff-b48095f87e61-kube-api-access-mvsst\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-skcz9\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.442452 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-skcz9\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.442597 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-skcz9\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.545489 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-skcz9\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.545716 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvsst\" (UniqueName: \"kubernetes.io/projected/23c4a939-99a1-4995-9bff-b48095f87e61-kube-api-access-mvsst\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-skcz9\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.545765 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-ssh-key\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-skcz9\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.549992 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-skcz9\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.550778 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-skcz9\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.564174 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvsst\" (UniqueName: \"kubernetes.io/projected/23c4a939-99a1-4995-9bff-b48095f87e61-kube-api-access-mvsst\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-skcz9\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:41:21 crc kubenswrapper[4911]: I0606 09:41:21.663584 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:41:22 crc kubenswrapper[4911]: I0606 09:41:22.179956 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9"] Jun 06 09:41:22 crc kubenswrapper[4911]: I0606 09:41:22.278989 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" event={"ID":"23c4a939-99a1-4995-9bff-b48095f87e61","Type":"ContainerStarted","Data":"6081801716c00dd56fac1a4f317953f4d5380df22f68575b99b613ba87f8e6ae"} Jun 06 09:41:23 crc kubenswrapper[4911]: I0606 09:41:23.289717 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" event={"ID":"23c4a939-99a1-4995-9bff-b48095f87e61","Type":"ContainerStarted","Data":"e4ae21eba951e954376511cb2eeb5b08371a4198d83085ce5e653ba817eb5d32"} Jun 06 09:41:23 crc kubenswrapper[4911]: I0606 09:41:23.315854 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" podStartSLOduration=1.525545988 podStartE2EDuration="2.3158359s" podCreationTimestamp="2025-06-06 09:41:21 +0000 UTC" firstStartedPulling="2025-06-06 09:41:22.180549767 +0000 UTC m=+1693.455975310" lastFinishedPulling="2025-06-06 09:41:22.970839679 +0000 UTC m=+1694.246265222" observedRunningTime="2025-06-06 09:41:23.310215546 +0000 UTC m=+1694.585641119" watchObservedRunningTime="2025-06-06 09:41:23.3158359 +0000 UTC m=+1694.591261443" Jun 06 09:41:25 crc kubenswrapper[4911]: I0606 09:41:25.948813 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:41:25 crc kubenswrapper[4911]: E0606 09:41:25.949373 4911 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:41:40 crc kubenswrapper[4911]: I0606 09:41:40.948692 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:41:40 crc kubenswrapper[4911]: E0606 09:41:40.949557 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:41:54 crc kubenswrapper[4911]: I0606 09:41:54.948451 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:41:54 crc kubenswrapper[4911]: E0606 09:41:54.949302 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.040582 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-69kzd"] Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.051189 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-69kzd"] Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.064255 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-w7fbb"] Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.065770 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-w7fbb" Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.204538 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ac83d17-da31-44a1-891a-c018da9f61ea-host\") pod \"crc-debug-w7fbb\" (UID: \"8ac83d17-da31-44a1-891a-c018da9f61ea\") " pod="openstack/crc-debug-w7fbb" Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.204711 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb86l\" (UniqueName: \"kubernetes.io/projected/8ac83d17-da31-44a1-891a-c018da9f61ea-kube-api-access-vb86l\") pod \"crc-debug-w7fbb\" (UID: \"8ac83d17-da31-44a1-891a-c018da9f61ea\") " pod="openstack/crc-debug-w7fbb" Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.307751 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ac83d17-da31-44a1-891a-c018da9f61ea-host\") pod \"crc-debug-w7fbb\" (UID: \"8ac83d17-da31-44a1-891a-c018da9f61ea\") " pod="openstack/crc-debug-w7fbb" Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.307950 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ac83d17-da31-44a1-891a-c018da9f61ea-host\") pod \"crc-debug-w7fbb\" (UID: \"8ac83d17-da31-44a1-891a-c018da9f61ea\") " pod="openstack/crc-debug-w7fbb" Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.307995 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb86l\" (UniqueName: \"kubernetes.io/projected/8ac83d17-da31-44a1-891a-c018da9f61ea-kube-api-access-vb86l\") pod \"crc-debug-w7fbb\" (UID: \"8ac83d17-da31-44a1-891a-c018da9f61ea\") " pod="openstack/crc-debug-w7fbb" Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.328870 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vb86l\" (UniqueName: \"kubernetes.io/projected/8ac83d17-da31-44a1-891a-c018da9f61ea-kube-api-access-vb86l\") pod \"crc-debug-w7fbb\" (UID: \"8ac83d17-da31-44a1-891a-c018da9f61ea\") " pod="openstack/crc-debug-w7fbb" Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.393167 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-w7fbb" Jun 06 09:42:02 crc kubenswrapper[4911]: I0606 09:42:02.653172 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-w7fbb" event={"ID":"8ac83d17-da31-44a1-891a-c018da9f61ea","Type":"ContainerStarted","Data":"0c96a154a615cddbde6c15b663e56f27e9f8191d0192f0f3f44385a7970383ef"} Jun 06 09:42:03 crc kubenswrapper[4911]: I0606 09:42:03.663783 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-w7fbb" event={"ID":"8ac83d17-da31-44a1-891a-c018da9f61ea","Type":"ContainerStarted","Data":"79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03"} Jun 06 09:42:03 crc kubenswrapper[4911]: I0606 09:42:03.686485 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-w7fbb" podStartSLOduration=1.6864622470000001 podStartE2EDuration="1.686462247s" podCreationTimestamp="2025-06-06 09:42:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:42:03.678315928 +0000 UTC m=+1734.953741481" watchObservedRunningTime="2025-06-06 09:42:03.686462247 +0000 UTC m=+1734.961887800" Jun 06 09:42:03 crc kubenswrapper[4911]: I0606 09:42:03.964762 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c18bbba-7f7b-4601-a8d0-971323c798ac" path="/var/lib/kubelet/pods/4c18bbba-7f7b-4601-a8d0-971323c798ac/volumes" Jun 06 09:42:05 crc kubenswrapper[4911]: I0606 09:42:05.948391 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:42:05 crc kubenswrapper[4911]: E0606 09:42:05.949036 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:42:08 crc kubenswrapper[4911]: I0606 09:42:08.056133 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-q7cj8"] Jun 06 09:42:08 crc kubenswrapper[4911]: I0606 09:42:08.064172 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-q7cj8"] Jun 06 09:42:09 crc kubenswrapper[4911]: I0606 09:42:09.963142 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd2440f6-658c-4edb-938c-b40f2d3f7cf4" path="/var/lib/kubelet/pods/cd2440f6-658c-4edb-938c-b40f2d3f7cf4/volumes" Jun 06 09:42:10 crc kubenswrapper[4911]: I0606 09:42:10.031661 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-4j5dr"] Jun 06 09:42:10 crc kubenswrapper[4911]: I0606 09:42:10.040480 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-4j5dr"] Jun 06 09:42:11 crc kubenswrapper[4911]: I0606 09:42:11.961069 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ac55e69-d8fc-414b-add4-1d60dfcee487" path="/var/lib/kubelet/pods/2ac55e69-d8fc-414b-add4-1d60dfcee487/volumes" Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.081786 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-w7fbb"] Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.082350 4911 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/crc-debug-w7fbb" podUID="8ac83d17-da31-44a1-891a-c018da9f61ea" containerName="container-00" containerID="cri-o://79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03" gracePeriod=2 Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.089723 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-w7fbb"] Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.215347 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-w7fbb" Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.349293 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ac83d17-da31-44a1-891a-c018da9f61ea-host\") pod \"8ac83d17-da31-44a1-891a-c018da9f61ea\" (UID: \"8ac83d17-da31-44a1-891a-c018da9f61ea\") " Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.349412 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vb86l\" (UniqueName: \"kubernetes.io/projected/8ac83d17-da31-44a1-891a-c018da9f61ea-kube-api-access-vb86l\") pod \"8ac83d17-da31-44a1-891a-c018da9f61ea\" (UID: \"8ac83d17-da31-44a1-891a-c018da9f61ea\") " Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.349425 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ac83d17-da31-44a1-891a-c018da9f61ea-host" (OuterVolumeSpecName: "host") pod "8ac83d17-da31-44a1-891a-c018da9f61ea" (UID: "8ac83d17-da31-44a1-891a-c018da9f61ea"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.350151 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8ac83d17-da31-44a1-891a-c018da9f61ea-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.355333 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ac83d17-da31-44a1-891a-c018da9f61ea-kube-api-access-vb86l" (OuterVolumeSpecName: "kube-api-access-vb86l") pod "8ac83d17-da31-44a1-891a-c018da9f61ea" (UID: "8ac83d17-da31-44a1-891a-c018da9f61ea"). InnerVolumeSpecName "kube-api-access-vb86l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.452687 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vb86l\" (UniqueName: \"kubernetes.io/projected/8ac83d17-da31-44a1-891a-c018da9f61ea-kube-api-access-vb86l\") on node \"crc\" DevicePath \"\"" Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.779865 4911 generic.go:334] "Generic (PLEG): container finished" podID="8ac83d17-da31-44a1-891a-c018da9f61ea" containerID="79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03" exitCode=0 Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.779955 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-w7fbb" Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.779942 4911 scope.go:117] "RemoveContainer" containerID="79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03" Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.802808 4911 scope.go:117] "RemoveContainer" containerID="79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03" Jun 06 09:42:13 crc kubenswrapper[4911]: E0606 09:42:13.803407 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03\": container with ID starting with 79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03 not found: ID does not exist" containerID="79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03" Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.803465 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03"} err="failed to get container status \"79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03\": rpc error: code = NotFound desc = could not find container \"79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03\": container with ID starting with 79fd3b8e4790a08aadec925c60d2730b4f60090066bf49191ab774cc36e82d03 not found: ID does not exist" Jun 06 09:42:13 crc kubenswrapper[4911]: I0606 09:42:13.960379 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ac83d17-da31-44a1-891a-c018da9f61ea" path="/var/lib/kubelet/pods/8ac83d17-da31-44a1-891a-c018da9f61ea/volumes" Jun 06 09:42:16 crc kubenswrapper[4911]: I0606 09:42:16.947874 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:42:16 crc kubenswrapper[4911]: E0606 09:42:16.948386 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:42:19 crc kubenswrapper[4911]: I0606 09:42:19.374450 4911 scope.go:117] "RemoveContainer" containerID="369ffb0677feb789e37b0194280b2b91350f281ed675f45995e3c2387a20b451" Jun 06 09:42:19 crc kubenswrapper[4911]: I0606 09:42:19.411449 4911 scope.go:117] "RemoveContainer" containerID="a153e854a31def2185439edccebff0910042704552a20e9b6fb22051e5e2a96f" Jun 06 09:42:19 crc kubenswrapper[4911]: I0606 09:42:19.471968 4911 scope.go:117] "RemoveContainer" containerID="bd1af89fbfa306f687233c3fcb8a5174c0ecdb8f42c26ffc0255ea9c17ecf8e9" Jun 06 09:42:29 crc kubenswrapper[4911]: I0606 09:42:29.957063 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:42:29 crc kubenswrapper[4911]: E0606 09:42:29.957960 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:42:36 crc kubenswrapper[4911]: I0606 09:42:36.990532 4911 generic.go:334] "Generic (PLEG): container finished" podID="23c4a939-99a1-4995-9bff-b48095f87e61" containerID="e4ae21eba951e954376511cb2eeb5b08371a4198d83085ce5e653ba817eb5d32" exitCode=0 Jun 06 09:42:36 crc kubenswrapper[4911]: I0606 09:42:36.990650 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" event={"ID":"23c4a939-99a1-4995-9bff-b48095f87e61","Type":"ContainerDied","Data":"e4ae21eba951e954376511cb2eeb5b08371a4198d83085ce5e653ba817eb5d32"} Jun 06 09:42:38 crc kubenswrapper[4911]: I0606 09:42:38.502666 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:42:38 crc kubenswrapper[4911]: I0606 09:42:38.586926 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvsst\" (UniqueName: \"kubernetes.io/projected/23c4a939-99a1-4995-9bff-b48095f87e61-kube-api-access-mvsst\") pod \"23c4a939-99a1-4995-9bff-b48095f87e61\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " Jun 06 09:42:38 crc kubenswrapper[4911]: I0606 09:42:38.587140 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-inventory\") pod \"23c4a939-99a1-4995-9bff-b48095f87e61\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " Jun 06 09:42:38 crc kubenswrapper[4911]: I0606 09:42:38.587278 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-ssh-key\") pod \"23c4a939-99a1-4995-9bff-b48095f87e61\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " Jun 06 09:42:38 crc kubenswrapper[4911]: I0606 09:42:38.593629 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23c4a939-99a1-4995-9bff-b48095f87e61-kube-api-access-mvsst" (OuterVolumeSpecName: "kube-api-access-mvsst") pod "23c4a939-99a1-4995-9bff-b48095f87e61" (UID: "23c4a939-99a1-4995-9bff-b48095f87e61"). InnerVolumeSpecName "kube-api-access-mvsst". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:42:38 crc kubenswrapper[4911]: E0606 09:42:38.613402 4911 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-inventory podName:23c4a939-99a1-4995-9bff-b48095f87e61 nodeName:}" failed. No retries permitted until 2025-06-06 09:42:39.113358829 +0000 UTC m=+1770.388784372 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-inventory") pod "23c4a939-99a1-4995-9bff-b48095f87e61" (UID: "23c4a939-99a1-4995-9bff-b48095f87e61") : error deleting /var/lib/kubelet/pods/23c4a939-99a1-4995-9bff-b48095f87e61/volume-subpaths: remove /var/lib/kubelet/pods/23c4a939-99a1-4995-9bff-b48095f87e61/volume-subpaths: no such file or directory Jun 06 09:42:38 crc kubenswrapper[4911]: I0606 09:42:38.618492 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "23c4a939-99a1-4995-9bff-b48095f87e61" (UID: "23c4a939-99a1-4995-9bff-b48095f87e61"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:42:38 crc kubenswrapper[4911]: I0606 09:42:38.690560 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvsst\" (UniqueName: \"kubernetes.io/projected/23c4a939-99a1-4995-9bff-b48095f87e61-kube-api-access-mvsst\") on node \"crc\" DevicePath \"\"" Jun 06 09:42:38 crc kubenswrapper[4911]: I0606 09:42:38.690612 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.012894 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" event={"ID":"23c4a939-99a1-4995-9bff-b48095f87e61","Type":"ContainerDied","Data":"6081801716c00dd56fac1a4f317953f4d5380df22f68575b99b613ba87f8e6ae"} Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.012961 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6081801716c00dd56fac1a4f317953f4d5380df22f68575b99b613ba87f8e6ae" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.012985 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-skcz9" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.103202 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j"] Jun 06 09:42:39 crc kubenswrapper[4911]: E0606 09:42:39.105032 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c4a939-99a1-4995-9bff-b48095f87e61" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.105501 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c4a939-99a1-4995-9bff-b48095f87e61" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Jun 06 09:42:39 crc kubenswrapper[4911]: E0606 09:42:39.105551 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ac83d17-da31-44a1-891a-c018da9f61ea" containerName="container-00" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.105572 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ac83d17-da31-44a1-891a-c018da9f61ea" containerName="container-00" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.106469 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="23c4a939-99a1-4995-9bff-b48095f87e61" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.106536 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ac83d17-da31-44a1-891a-c018da9f61ea" containerName="container-00" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.111149 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.115254 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-inventory\") pod \"23c4a939-99a1-4995-9bff-b48095f87e61\" (UID: \"23c4a939-99a1-4995-9bff-b48095f87e61\") " Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.119778 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j"] Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.136156 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-inventory" (OuterVolumeSpecName: "inventory") pod "23c4a939-99a1-4995-9bff-b48095f87e61" (UID: "23c4a939-99a1-4995-9bff-b48095f87e61"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.243281 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gm4dz\" (UniqueName: \"kubernetes.io/projected/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-kube-api-access-gm4dz\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-67f5j\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.243362 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-67f5j\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.243400 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-67f5j\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.243484 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23c4a939-99a1-4995-9bff-b48095f87e61-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.345695 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gm4dz\" (UniqueName: \"kubernetes.io/projected/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-kube-api-access-gm4dz\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-67f5j\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.346242 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-67f5j\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.346289 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-67f5j\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.350796 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-67f5j\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.350835 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-67f5j\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.362990 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gm4dz\" (UniqueName: \"kubernetes.io/projected/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-kube-api-access-gm4dz\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-67f5j\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:39 crc kubenswrapper[4911]: I0606 09:42:39.485944 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:40 crc kubenswrapper[4911]: I0606 09:42:40.012542 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j"] Jun 06 09:42:40 crc kubenswrapper[4911]: I0606 09:42:40.035133 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" event={"ID":"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb","Type":"ContainerStarted","Data":"22c3efefc380de26feb0d77aff0ec4dced5d3e87c6dda50a57e94294a88e2bda"} Jun 06 09:42:41 crc kubenswrapper[4911]: I0606 09:42:41.045760 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" event={"ID":"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb","Type":"ContainerStarted","Data":"f5b502acc8257afdcd23d3d24dcda736840965aaa60a12d57b26d5a944b152cc"} Jun 06 09:42:41 crc kubenswrapper[4911]: I0606 09:42:41.068862 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" podStartSLOduration=1.530144342 podStartE2EDuration="2.068837628s" podCreationTimestamp="2025-06-06 09:42:39 +0000 UTC" firstStartedPulling="2025-06-06 09:42:40.017810726 +0000 UTC m=+1771.293236269" lastFinishedPulling="2025-06-06 09:42:40.556504012 +0000 UTC m=+1771.831929555" observedRunningTime="2025-06-06 09:42:41.062994577 +0000 UTC m=+1772.338420120" watchObservedRunningTime="2025-06-06 09:42:41.068837628 +0000 UTC m=+1772.344263171" Jun 06 09:42:42 crc kubenswrapper[4911]: I0606 09:42:42.037589 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-56x7b"] Jun 06 09:42:42 crc kubenswrapper[4911]: I0606 09:42:42.046030 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-g64qx"] Jun 06 09:42:42 crc kubenswrapper[4911]: I0606 09:42:42.055074 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-v6s7j"] Jun 06 09:42:42 crc kubenswrapper[4911]: I0606 09:42:42.063174 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-56x7b"] Jun 06 09:42:42 crc kubenswrapper[4911]: I0606 09:42:42.073861 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-g64qx"] Jun 06 09:42:42 crc kubenswrapper[4911]: I0606 09:42:42.081332 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-v6s7j"] Jun 06 09:42:43 crc kubenswrapper[4911]: I0606 
09:42:43.949037 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:42:43 crc kubenswrapper[4911]: E0606 09:42:43.950086 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:42:43 crc kubenswrapper[4911]: I0606 09:42:43.962601 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1528dab4-6c33-4923-8c32-9c2b39aee053" path="/var/lib/kubelet/pods/1528dab4-6c33-4923-8c32-9c2b39aee053/volumes" Jun 06 09:42:43 crc kubenswrapper[4911]: I0606 09:42:43.963182 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1bfa29c-daf6-4b0c-89e6-3704863677e3" path="/var/lib/kubelet/pods/a1bfa29c-daf6-4b0c-89e6-3704863677e3/volumes" Jun 06 09:42:43 crc kubenswrapper[4911]: I0606 09:42:43.963739 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0c460c5-5199-4a41-afb5-58b1bcbe0fa7" path="/var/lib/kubelet/pods/b0c460c5-5199-4a41-afb5-58b1bcbe0fa7/volumes" Jun 06 09:42:46 crc kubenswrapper[4911]: I0606 09:42:46.091200 4911 generic.go:334] "Generic (PLEG): container finished" podID="3edd2d01-6fd8-44a5-affd-c2a0d9c114cb" containerID="f5b502acc8257afdcd23d3d24dcda736840965aaa60a12d57b26d5a944b152cc" exitCode=0 Jun 06 09:42:46 crc kubenswrapper[4911]: I0606 09:42:46.091297 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" event={"ID":"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb","Type":"ContainerDied","Data":"f5b502acc8257afdcd23d3d24dcda736840965aaa60a12d57b26d5a944b152cc"} Jun 06 09:42:47 crc kubenswrapper[4911]: I0606 09:42:47.543377 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:47 crc kubenswrapper[4911]: I0606 09:42:47.641255 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-ssh-key\") pod \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " Jun 06 09:42:47 crc kubenswrapper[4911]: I0606 09:42:47.641332 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gm4dz\" (UniqueName: \"kubernetes.io/projected/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-kube-api-access-gm4dz\") pod \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " Jun 06 09:42:47 crc kubenswrapper[4911]: I0606 09:42:47.641506 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-inventory\") pod \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\" (UID: \"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb\") " Jun 06 09:42:47 crc kubenswrapper[4911]: I0606 09:42:47.647668 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-kube-api-access-gm4dz" (OuterVolumeSpecName: "kube-api-access-gm4dz") pod "3edd2d01-6fd8-44a5-affd-c2a0d9c114cb" (UID: "3edd2d01-6fd8-44a5-affd-c2a0d9c114cb"). InnerVolumeSpecName "kube-api-access-gm4dz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:42:47 crc kubenswrapper[4911]: I0606 09:42:47.671548 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3edd2d01-6fd8-44a5-affd-c2a0d9c114cb" (UID: "3edd2d01-6fd8-44a5-affd-c2a0d9c114cb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:42:47 crc kubenswrapper[4911]: I0606 09:42:47.679723 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-inventory" (OuterVolumeSpecName: "inventory") pod "3edd2d01-6fd8-44a5-affd-c2a0d9c114cb" (UID: "3edd2d01-6fd8-44a5-affd-c2a0d9c114cb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:42:47 crc kubenswrapper[4911]: I0606 09:42:47.743367 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:42:47 crc kubenswrapper[4911]: I0606 09:42:47.743400 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gm4dz\" (UniqueName: \"kubernetes.io/projected/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-kube-api-access-gm4dz\") on node \"crc\" DevicePath \"\"" Jun 06 09:42:47 crc kubenswrapper[4911]: I0606 09:42:47.743413 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3edd2d01-6fd8-44a5-affd-c2a0d9c114cb-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.111944 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" event={"ID":"3edd2d01-6fd8-44a5-affd-c2a0d9c114cb","Type":"ContainerDied","Data":"22c3efefc380de26feb0d77aff0ec4dced5d3e87c6dda50a57e94294a88e2bda"} Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.111986 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22c3efefc380de26feb0d77aff0ec4dced5d3e87c6dda50a57e94294a88e2bda" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.111989 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-67f5j" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.178928 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz"] Jun 06 09:42:48 crc kubenswrapper[4911]: E0606 09:42:48.179739 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3edd2d01-6fd8-44a5-affd-c2a0d9c114cb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.179769 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3edd2d01-6fd8-44a5-affd-c2a0d9c114cb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.180062 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3edd2d01-6fd8-44a5-affd-c2a0d9c114cb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.181245 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.184481 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.184807 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.184989 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.185205 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.188935 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz"] Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.357396 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxhcp\" (UniqueName: \"kubernetes.io/projected/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-kube-api-access-sxhcp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z2ngz\" (UID: \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.357765 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z2ngz\" (UID: \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.357821 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z2ngz\" (UID: \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.459653 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxhcp\" (UniqueName: \"kubernetes.io/projected/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-kube-api-access-sxhcp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z2ngz\" (UID: \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.459775 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z2ngz\" (UID: \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.459800 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z2ngz\" (UID: 
\"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.464025 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z2ngz\" (UID: \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.466391 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z2ngz\" (UID: \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.478611 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxhcp\" (UniqueName: \"kubernetes.io/projected/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-kube-api-access-sxhcp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-z2ngz\" (UID: \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.499830 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:42:48 crc kubenswrapper[4911]: I0606 09:42:48.999743 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz"] Jun 06 09:42:49 crc kubenswrapper[4911]: I0606 09:42:49.123992 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" event={"ID":"e9d76de2-0187-44f1-b3e6-457dc1ca47e1","Type":"ContainerStarted","Data":"8c6d53f7f296defaf66a5da260e74040d2f7fab65e013bb412ee0ecaf283fd5a"} Jun 06 09:42:50 crc kubenswrapper[4911]: I0606 09:42:50.133003 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" event={"ID":"e9d76de2-0187-44f1-b3e6-457dc1ca47e1","Type":"ContainerStarted","Data":"5847c32398bb3f74092aeb7e614208803b7665ba92c38c418975ba0d0f0e4bfe"} Jun 06 09:42:50 crc kubenswrapper[4911]: I0606 09:42:50.150616 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" podStartSLOduration=1.756801661 podStartE2EDuration="2.150595207s" podCreationTimestamp="2025-06-06 09:42:48 +0000 UTC" firstStartedPulling="2025-06-06 09:42:49.004399047 +0000 UTC m=+1780.279824590" lastFinishedPulling="2025-06-06 09:42:49.398192593 +0000 UTC m=+1780.673618136" observedRunningTime="2025-06-06 09:42:50.147866596 +0000 UTC m=+1781.423292139" watchObservedRunningTime="2025-06-06 09:42:50.150595207 +0000 UTC m=+1781.426020750" Jun 06 09:42:58 crc kubenswrapper[4911]: I0606 09:42:58.947749 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:42:58 crc kubenswrapper[4911]: E0606 09:42:58.948585 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.049546 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-99ed-account-create-wm9cl"] Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.057668 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-e109-account-create-tl6lr"] Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.065133 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-99ed-account-create-wm9cl"] Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.073209 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-e109-account-create-tl6lr"] Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.524541 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-dsjlw"] Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.525809 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-dsjlw" Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.623867 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkk8x\" (UniqueName: \"kubernetes.io/projected/1936ff68-add6-43e3-83e2-5351ffef0bcf-kube-api-access-pkk8x\") pod \"crc-debug-dsjlw\" (UID: \"1936ff68-add6-43e3-83e2-5351ffef0bcf\") " pod="openstack/crc-debug-dsjlw" Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.623960 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1936ff68-add6-43e3-83e2-5351ffef0bcf-host\") pod \"crc-debug-dsjlw\" (UID: \"1936ff68-add6-43e3-83e2-5351ffef0bcf\") " pod="openstack/crc-debug-dsjlw" Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.726248 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkk8x\" (UniqueName: \"kubernetes.io/projected/1936ff68-add6-43e3-83e2-5351ffef0bcf-kube-api-access-pkk8x\") pod \"crc-debug-dsjlw\" (UID: \"1936ff68-add6-43e3-83e2-5351ffef0bcf\") " pod="openstack/crc-debug-dsjlw" Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.726377 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1936ff68-add6-43e3-83e2-5351ffef0bcf-host\") pod \"crc-debug-dsjlw\" (UID: \"1936ff68-add6-43e3-83e2-5351ffef0bcf\") " pod="openstack/crc-debug-dsjlw" Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.726547 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1936ff68-add6-43e3-83e2-5351ffef0bcf-host\") pod \"crc-debug-dsjlw\" (UID: \"1936ff68-add6-43e3-83e2-5351ffef0bcf\") " pod="openstack/crc-debug-dsjlw" Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.747050 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkk8x\" (UniqueName: \"kubernetes.io/projected/1936ff68-add6-43e3-83e2-5351ffef0bcf-kube-api-access-pkk8x\") pod \"crc-debug-dsjlw\" (UID: \"1936ff68-add6-43e3-83e2-5351ffef0bcf\") " pod="openstack/crc-debug-dsjlw" Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.849793 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-dsjlw" Jun 06 09:43:01 crc kubenswrapper[4911]: W0606 09:43:01.891441 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1936ff68_add6_43e3_83e2_5351ffef0bcf.slice/crio-266f3299fc8a612171f42021555aa990a58efd0c7ac7bd2892d96a388d630811 WatchSource:0}: Error finding container 266f3299fc8a612171f42021555aa990a58efd0c7ac7bd2892d96a388d630811: Status 404 returned error can't find the container with id 266f3299fc8a612171f42021555aa990a58efd0c7ac7bd2892d96a388d630811 Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.959584 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="234e8605-be3a-44ec-8888-be1ce11be223" path="/var/lib/kubelet/pods/234e8605-be3a-44ec-8888-be1ce11be223/volumes" Jun 06 09:43:01 crc kubenswrapper[4911]: I0606 09:43:01.960718 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="511fdc0d-3107-49da-aee6-58fcb3071264" path="/var/lib/kubelet/pods/511fdc0d-3107-49da-aee6-58fcb3071264/volumes" Jun 06 09:43:02 crc kubenswrapper[4911]: I0606 09:43:02.029192 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-14a8-account-create-74hbj"] Jun 06 09:43:02 crc kubenswrapper[4911]: I0606 09:43:02.037744 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-14a8-account-create-74hbj"] Jun 06 09:43:02 crc kubenswrapper[4911]: I0606 09:43:02.243653 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-dsjlw" event={"ID":"1936ff68-add6-43e3-83e2-5351ffef0bcf","Type":"ContainerStarted","Data":"feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86"} Jun 06 09:43:02 crc kubenswrapper[4911]: I0606 09:43:02.243716 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-dsjlw" event={"ID":"1936ff68-add6-43e3-83e2-5351ffef0bcf","Type":"ContainerStarted","Data":"266f3299fc8a612171f42021555aa990a58efd0c7ac7bd2892d96a388d630811"} Jun 06 09:43:02 crc kubenswrapper[4911]: I0606 09:43:02.261738 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-dsjlw" podStartSLOduration=1.261715319 podStartE2EDuration="1.261715319s" podCreationTimestamp="2025-06-06 09:43:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:43:02.254076392 +0000 UTC m=+1793.529501955" watchObservedRunningTime="2025-06-06 09:43:02.261715319 +0000 UTC m=+1793.537140862" Jun 06 09:43:03 crc kubenswrapper[4911]: I0606 09:43:03.959534 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11097c32-bd99-4810-b897-7edf75c5e2cb" path="/var/lib/kubelet/pods/11097c32-bd99-4810-b897-7edf75c5e2cb/volumes" Jun 06 09:43:10 crc kubenswrapper[4911]: I0606 09:43:10.949494 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:43:10 crc kubenswrapper[4911]: E0606 09:43:10.950260 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:43:12 crc 
kubenswrapper[4911]: I0606 09:43:12.439271 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-dsjlw"] Jun 06 09:43:12 crc kubenswrapper[4911]: I0606 09:43:12.440212 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-dsjlw" podUID="1936ff68-add6-43e3-83e2-5351ffef0bcf" containerName="container-00" containerID="cri-o://feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86" gracePeriod=2 Jun 06 09:43:12 crc kubenswrapper[4911]: I0606 09:43:12.450783 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-dsjlw"] Jun 06 09:43:12 crc kubenswrapper[4911]: I0606 09:43:12.567794 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-dsjlw" Jun 06 09:43:12 crc kubenswrapper[4911]: I0606 09:43:12.684314 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkk8x\" (UniqueName: \"kubernetes.io/projected/1936ff68-add6-43e3-83e2-5351ffef0bcf-kube-api-access-pkk8x\") pod \"1936ff68-add6-43e3-83e2-5351ffef0bcf\" (UID: \"1936ff68-add6-43e3-83e2-5351ffef0bcf\") " Jun 06 09:43:12 crc kubenswrapper[4911]: I0606 09:43:12.685371 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1936ff68-add6-43e3-83e2-5351ffef0bcf-host\") pod \"1936ff68-add6-43e3-83e2-5351ffef0bcf\" (UID: \"1936ff68-add6-43e3-83e2-5351ffef0bcf\") " Jun 06 09:43:12 crc kubenswrapper[4911]: I0606 09:43:12.685430 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1936ff68-add6-43e3-83e2-5351ffef0bcf-host" (OuterVolumeSpecName: "host") pod "1936ff68-add6-43e3-83e2-5351ffef0bcf" (UID: "1936ff68-add6-43e3-83e2-5351ffef0bcf"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:43:12 crc kubenswrapper[4911]: I0606 09:43:12.686260 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1936ff68-add6-43e3-83e2-5351ffef0bcf-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:43:12 crc kubenswrapper[4911]: I0606 09:43:12.691723 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1936ff68-add6-43e3-83e2-5351ffef0bcf-kube-api-access-pkk8x" (OuterVolumeSpecName: "kube-api-access-pkk8x") pod "1936ff68-add6-43e3-83e2-5351ffef0bcf" (UID: "1936ff68-add6-43e3-83e2-5351ffef0bcf"). InnerVolumeSpecName "kube-api-access-pkk8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:43:12 crc kubenswrapper[4911]: I0606 09:43:12.788369 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkk8x\" (UniqueName: \"kubernetes.io/projected/1936ff68-add6-43e3-83e2-5351ffef0bcf-kube-api-access-pkk8x\") on node \"crc\" DevicePath \"\"" Jun 06 09:43:13 crc kubenswrapper[4911]: I0606 09:43:13.356141 4911 generic.go:334] "Generic (PLEG): container finished" podID="1936ff68-add6-43e3-83e2-5351ffef0bcf" containerID="feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86" exitCode=0 Jun 06 09:43:13 crc kubenswrapper[4911]: I0606 09:43:13.356212 4911 scope.go:117] "RemoveContainer" containerID="feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86" Jun 06 09:43:13 crc kubenswrapper[4911]: I0606 09:43:13.356333 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-dsjlw" Jun 06 09:43:13 crc kubenswrapper[4911]: I0606 09:43:13.383874 4911 scope.go:117] "RemoveContainer" containerID="feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86" Jun 06 09:43:13 crc kubenswrapper[4911]: E0606 09:43:13.384472 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86\": container with ID starting with feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86 not found: ID does not exist" containerID="feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86" Jun 06 09:43:13 crc kubenswrapper[4911]: I0606 09:43:13.384511 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86"} err="failed to get container status \"feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86\": rpc error: code = NotFound desc = could not find container \"feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86\": container with ID starting with feaf8e031237f22f190301427436a7fd9e170598eff6acb24cc1e8ab8839ee86 not found: ID does not exist" Jun 06 09:43:13 crc kubenswrapper[4911]: I0606 09:43:13.959406 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1936ff68-add6-43e3-83e2-5351ffef0bcf" path="/var/lib/kubelet/pods/1936ff68-add6-43e3-83e2-5351ffef0bcf/volumes" Jun 06 09:43:19 crc kubenswrapper[4911]: I0606 09:43:19.627063 4911 scope.go:117] "RemoveContainer" containerID="06d0629dd04f49c0b4ad482308bde30ca337e6b82fb1d94ac01fd6420df3649c" Jun 06 09:43:19 crc kubenswrapper[4911]: I0606 09:43:19.660989 4911 scope.go:117] "RemoveContainer" containerID="d29315941a7e83f8341dbf443ef1920fdf3141d7aff95e5c066114e936667348" Jun 06 09:43:19 crc kubenswrapper[4911]: I0606 09:43:19.722207 4911 scope.go:117] "RemoveContainer" containerID="c0a19fffd24d50c5bc7253901921e07792a3c62f8221bf1fc754d6223aa849c7" Jun 06 09:43:19 crc kubenswrapper[4911]: I0606 09:43:19.779920 4911 scope.go:117] "RemoveContainer" containerID="8739719d3d618f597405794249f3e87f976150ea7793e261e470d28b69c53c6f" Jun 06 09:43:19 crc kubenswrapper[4911]: I0606 09:43:19.832685 4911 scope.go:117] "RemoveContainer" containerID="1a5db47afbc0c1b7bc5be0030cc80b38ef2fae18ffb544710c409d65988c99a7" Jun 06 09:43:19 crc kubenswrapper[4911]: I0606 09:43:19.890076 4911 scope.go:117] "RemoveContainer" containerID="90aa699cc169549dcdc6a4e779dab49b54ed20ca772e353a836a4794accab471" Jun 06 09:43:22 crc kubenswrapper[4911]: I0606 09:43:22.948210 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:43:22 crc kubenswrapper[4911]: E0606 09:43:22.948740 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:43:26 crc kubenswrapper[4911]: I0606 09:43:26.497767 4911 generic.go:334] "Generic (PLEG): container finished" podID="e9d76de2-0187-44f1-b3e6-457dc1ca47e1" containerID="5847c32398bb3f74092aeb7e614208803b7665ba92c38c418975ba0d0f0e4bfe" exitCode=0 Jun 06 
09:43:26 crc kubenswrapper[4911]: I0606 09:43:26.497873 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" event={"ID":"e9d76de2-0187-44f1-b3e6-457dc1ca47e1","Type":"ContainerDied","Data":"5847c32398bb3f74092aeb7e614208803b7665ba92c38c418975ba0d0f0e4bfe"} Jun 06 09:43:27 crc kubenswrapper[4911]: I0606 09:43:27.943318 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.040024 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6rrkj"] Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.047385 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6rrkj"] Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.124475 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxhcp\" (UniqueName: \"kubernetes.io/projected/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-kube-api-access-sxhcp\") pod \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\" (UID: \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.124590 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-inventory\") pod \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\" (UID: \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.124727 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-ssh-key\") pod \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\" (UID: \"e9d76de2-0187-44f1-b3e6-457dc1ca47e1\") " Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.131400 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-kube-api-access-sxhcp" (OuterVolumeSpecName: "kube-api-access-sxhcp") pod "e9d76de2-0187-44f1-b3e6-457dc1ca47e1" (UID: "e9d76de2-0187-44f1-b3e6-457dc1ca47e1"). InnerVolumeSpecName "kube-api-access-sxhcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.159295 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e9d76de2-0187-44f1-b3e6-457dc1ca47e1" (UID: "e9d76de2-0187-44f1-b3e6-457dc1ca47e1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.159523 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-inventory" (OuterVolumeSpecName: "inventory") pod "e9d76de2-0187-44f1-b3e6-457dc1ca47e1" (UID: "e9d76de2-0187-44f1-b3e6-457dc1ca47e1"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.226940 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxhcp\" (UniqueName: \"kubernetes.io/projected/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-kube-api-access-sxhcp\") on node \"crc\" DevicePath \"\"" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.226989 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.227000 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9d76de2-0187-44f1-b3e6-457dc1ca47e1-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.519609 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" event={"ID":"e9d76de2-0187-44f1-b3e6-457dc1ca47e1","Type":"ContainerDied","Data":"8c6d53f7f296defaf66a5da260e74040d2f7fab65e013bb412ee0ecaf283fd5a"} Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.519662 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c6d53f7f296defaf66a5da260e74040d2f7fab65e013bb412ee0ecaf283fd5a" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.519722 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-z2ngz" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.601408 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw"] Jun 06 09:43:28 crc kubenswrapper[4911]: E0606 09:43:28.601946 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9d76de2-0187-44f1-b3e6-457dc1ca47e1" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.601975 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9d76de2-0187-44f1-b3e6-457dc1ca47e1" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:43:28 crc kubenswrapper[4911]: E0606 09:43:28.601995 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1936ff68-add6-43e3-83e2-5351ffef0bcf" containerName="container-00" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.602002 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1936ff68-add6-43e3-83e2-5351ffef0bcf" containerName="container-00" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.602292 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9d76de2-0187-44f1-b3e6-457dc1ca47e1" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.602323 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1936ff68-add6-43e3-83e2-5351ffef0bcf" containerName="container-00" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.603136 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.605544 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.606151 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.606937 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.608281 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.620922 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw"] Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.635525 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z46kw\" (UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.635607 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbtgp\" (UniqueName: \"kubernetes.io/projected/4856f749-b866-41cf-bdc2-0a5c8b2fce43-kube-api-access-qbtgp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z46kw\" (UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.635694 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z46kw\" (UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.737650 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z46kw\" (UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.737733 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbtgp\" (UniqueName: \"kubernetes.io/projected/4856f749-b866-41cf-bdc2-0a5c8b2fce43-kube-api-access-qbtgp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z46kw\" (UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.737768 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z46kw\" 
(UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.743624 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z46kw\" (UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.743963 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z46kw\" (UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.755660 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbtgp\" (UniqueName: \"kubernetes.io/projected/4856f749-b866-41cf-bdc2-0a5c8b2fce43-kube-api-access-qbtgp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z46kw\" (UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:43:28 crc kubenswrapper[4911]: I0606 09:43:28.923473 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:43:29 crc kubenswrapper[4911]: I0606 09:43:29.454598 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw"] Jun 06 09:43:29 crc kubenswrapper[4911]: I0606 09:43:29.530390 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" event={"ID":"4856f749-b866-41cf-bdc2-0a5c8b2fce43","Type":"ContainerStarted","Data":"0e97ed37796da75d281d5cfa16d2ab056b087e7e1df3e5db8cd208570b9d07b2"} Jun 06 09:43:29 crc kubenswrapper[4911]: I0606 09:43:29.958535 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa" path="/var/lib/kubelet/pods/aa07fe9d-3188-4dae-b4f9-29e4a1efbbfa/volumes" Jun 06 09:43:31 crc kubenswrapper[4911]: I0606 09:43:31.548495 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" event={"ID":"4856f749-b866-41cf-bdc2-0a5c8b2fce43","Type":"ContainerStarted","Data":"9fbd351d508ad987a027338bbe3df60fba558a322d9e4bb2dc45e078b618a041"} Jun 06 09:43:31 crc kubenswrapper[4911]: I0606 09:43:31.574916 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" podStartSLOduration=2.722554386 podStartE2EDuration="3.57489359s" podCreationTimestamp="2025-06-06 09:43:28 +0000 UTC" firstStartedPulling="2025-06-06 09:43:29.461478131 +0000 UTC m=+1820.736903684" lastFinishedPulling="2025-06-06 09:43:30.313817345 +0000 UTC m=+1821.589242888" observedRunningTime="2025-06-06 09:43:31.565822705 +0000 UTC m=+1822.841248268" watchObservedRunningTime="2025-06-06 09:43:31.57489359 +0000 UTC m=+1822.850319133" Jun 06 09:43:35 crc kubenswrapper[4911]: I0606 09:43:35.949019 4911 scope.go:117] "RemoveContainer" 
containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:43:36 crc kubenswrapper[4911]: I0606 09:43:36.597667 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"90c33cab05764d4260fff87e3c1193421c452a61479539125aaaae13a635ce01"} Jun 06 09:43:53 crc kubenswrapper[4911]: I0606 09:43:53.039606 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-hbh54"] Jun 06 09:43:53 crc kubenswrapper[4911]: I0606 09:43:53.053257 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9fkt8"] Jun 06 09:43:53 crc kubenswrapper[4911]: I0606 09:43:53.060781 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-hbh54"] Jun 06 09:43:53 crc kubenswrapper[4911]: I0606 09:43:53.069975 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-9fkt8"] Jun 06 09:43:53 crc kubenswrapper[4911]: I0606 09:43:53.962496 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f5dd978-318c-4d2d-88f4-7c4b01712832" path="/var/lib/kubelet/pods/1f5dd978-318c-4d2d-88f4-7c4b01712832/volumes" Jun 06 09:43:53 crc kubenswrapper[4911]: I0606 09:43:53.963435 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca5e53cd-23ba-4460-80b0-4c2cca13773e" path="/var/lib/kubelet/pods/ca5e53cd-23ba-4460-80b0-4c2cca13773e/volumes" Jun 06 09:44:01 crc kubenswrapper[4911]: I0606 09:44:01.803246 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-z6gfk"] Jun 06 09:44:01 crc kubenswrapper[4911]: I0606 09:44:01.805184 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-z6gfk" Jun 06 09:44:01 crc kubenswrapper[4911]: I0606 09:44:01.923406 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5qwn\" (UniqueName: \"kubernetes.io/projected/2b0e0396-8693-4519-99cc-e14f027f59ba-kube-api-access-w5qwn\") pod \"crc-debug-z6gfk\" (UID: \"2b0e0396-8693-4519-99cc-e14f027f59ba\") " pod="openstack/crc-debug-z6gfk" Jun 06 09:44:01 crc kubenswrapper[4911]: I0606 09:44:01.923920 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2b0e0396-8693-4519-99cc-e14f027f59ba-host\") pod \"crc-debug-z6gfk\" (UID: \"2b0e0396-8693-4519-99cc-e14f027f59ba\") " pod="openstack/crc-debug-z6gfk" Jun 06 09:44:02 crc kubenswrapper[4911]: I0606 09:44:02.025816 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2b0e0396-8693-4519-99cc-e14f027f59ba-host\") pod \"crc-debug-z6gfk\" (UID: \"2b0e0396-8693-4519-99cc-e14f027f59ba\") " pod="openstack/crc-debug-z6gfk" Jun 06 09:44:02 crc kubenswrapper[4911]: I0606 09:44:02.025954 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5qwn\" (UniqueName: \"kubernetes.io/projected/2b0e0396-8693-4519-99cc-e14f027f59ba-kube-api-access-w5qwn\") pod \"crc-debug-z6gfk\" (UID: \"2b0e0396-8693-4519-99cc-e14f027f59ba\") " pod="openstack/crc-debug-z6gfk" Jun 06 09:44:02 crc kubenswrapper[4911]: I0606 09:44:02.026063 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2b0e0396-8693-4519-99cc-e14f027f59ba-host\") pod \"crc-debug-z6gfk\" (UID: \"2b0e0396-8693-4519-99cc-e14f027f59ba\") " pod="openstack/crc-debug-z6gfk" Jun 06 09:44:02 crc kubenswrapper[4911]: I0606 09:44:02.051859 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5qwn\" (UniqueName: \"kubernetes.io/projected/2b0e0396-8693-4519-99cc-e14f027f59ba-kube-api-access-w5qwn\") pod \"crc-debug-z6gfk\" (UID: \"2b0e0396-8693-4519-99cc-e14f027f59ba\") " pod="openstack/crc-debug-z6gfk" Jun 06 09:44:02 crc kubenswrapper[4911]: I0606 09:44:02.133069 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-z6gfk" Jun 06 09:44:02 crc kubenswrapper[4911]: I0606 09:44:02.872583 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-z6gfk" event={"ID":"2b0e0396-8693-4519-99cc-e14f027f59ba","Type":"ContainerStarted","Data":"e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1"} Jun 06 09:44:02 crc kubenswrapper[4911]: I0606 09:44:02.872922 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-z6gfk" event={"ID":"2b0e0396-8693-4519-99cc-e14f027f59ba","Type":"ContainerStarted","Data":"f8728389e589e59984b68fc425b5e82f0518032be15296a02737bde3ae625190"} Jun 06 09:44:02 crc kubenswrapper[4911]: I0606 09:44:02.889994 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-z6gfk" podStartSLOduration=1.889976797 podStartE2EDuration="1.889976797s" podCreationTimestamp="2025-06-06 09:44:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:44:02.886244681 +0000 UTC m=+1854.161670224" watchObservedRunningTime="2025-06-06 09:44:02.889976797 +0000 UTC m=+1854.165402340" Jun 06 09:44:12 crc kubenswrapper[4911]: I0606 09:44:12.779932 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-z6gfk"] Jun 06 09:44:12 crc kubenswrapper[4911]: I0606 09:44:12.781285 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-z6gfk" podUID="2b0e0396-8693-4519-99cc-e14f027f59ba" containerName="container-00" containerID="cri-o://e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1" gracePeriod=2 Jun 06 09:44:12 crc kubenswrapper[4911]: I0606 09:44:12.788473 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-z6gfk"] Jun 06 09:44:12 crc kubenswrapper[4911]: I0606 09:44:12.895907 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-z6gfk" Jun 06 09:44:12 crc kubenswrapper[4911]: I0606 09:44:12.962075 4911 generic.go:334] "Generic (PLEG): container finished" podID="2b0e0396-8693-4519-99cc-e14f027f59ba" containerID="e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1" exitCode=0 Jun 06 09:44:12 crc kubenswrapper[4911]: I0606 09:44:12.962158 4911 scope.go:117] "RemoveContainer" containerID="e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1" Jun 06 09:44:12 crc kubenswrapper[4911]: I0606 09:44:12.962162 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-z6gfk" Jun 06 09:44:12 crc kubenswrapper[4911]: I0606 09:44:12.986241 4911 scope.go:117] "RemoveContainer" containerID="e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1" Jun 06 09:44:12 crc kubenswrapper[4911]: E0606 09:44:12.986672 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1\": container with ID starting with e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1 not found: ID does not exist" containerID="e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1" Jun 06 09:44:12 crc kubenswrapper[4911]: I0606 09:44:12.986712 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1"} err="failed to get container status \"e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1\": rpc error: code = NotFound desc = could not find container \"e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1\": container with ID starting with e65d3bf31002fba8ba253be60912a75c468d825c8a707d96027ac30ee60b09c1 not found: ID does not exist" Jun 06 09:44:13 crc kubenswrapper[4911]: I0606 09:44:13.043987 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2b0e0396-8693-4519-99cc-e14f027f59ba-host\") pod \"2b0e0396-8693-4519-99cc-e14f027f59ba\" (UID: \"2b0e0396-8693-4519-99cc-e14f027f59ba\") " Jun 06 09:44:13 crc kubenswrapper[4911]: I0606 09:44:13.044136 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2b0e0396-8693-4519-99cc-e14f027f59ba-host" (OuterVolumeSpecName: "host") pod "2b0e0396-8693-4519-99cc-e14f027f59ba" (UID: "2b0e0396-8693-4519-99cc-e14f027f59ba"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:44:13 crc kubenswrapper[4911]: I0606 09:44:13.044235 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5qwn\" (UniqueName: \"kubernetes.io/projected/2b0e0396-8693-4519-99cc-e14f027f59ba-kube-api-access-w5qwn\") pod \"2b0e0396-8693-4519-99cc-e14f027f59ba\" (UID: \"2b0e0396-8693-4519-99cc-e14f027f59ba\") " Jun 06 09:44:13 crc kubenswrapper[4911]: I0606 09:44:13.045036 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2b0e0396-8693-4519-99cc-e14f027f59ba-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:13 crc kubenswrapper[4911]: I0606 09:44:13.051267 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b0e0396-8693-4519-99cc-e14f027f59ba-kube-api-access-w5qwn" (OuterVolumeSpecName: "kube-api-access-w5qwn") pod "2b0e0396-8693-4519-99cc-e14f027f59ba" (UID: "2b0e0396-8693-4519-99cc-e14f027f59ba"). InnerVolumeSpecName "kube-api-access-w5qwn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:44:13 crc kubenswrapper[4911]: I0606 09:44:13.146695 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5qwn\" (UniqueName: \"kubernetes.io/projected/2b0e0396-8693-4519-99cc-e14f027f59ba-kube-api-access-w5qwn\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:13 crc kubenswrapper[4911]: I0606 09:44:13.960195 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b0e0396-8693-4519-99cc-e14f027f59ba" path="/var/lib/kubelet/pods/2b0e0396-8693-4519-99cc-e14f027f59ba/volumes" Jun 06 09:44:20 crc kubenswrapper[4911]: I0606 09:44:20.025482 4911 generic.go:334] "Generic (PLEG): container finished" podID="4856f749-b866-41cf-bdc2-0a5c8b2fce43" containerID="9fbd351d508ad987a027338bbe3df60fba558a322d9e4bb2dc45e078b618a041" exitCode=0 Jun 06 09:44:20 crc kubenswrapper[4911]: I0606 09:44:20.025573 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" event={"ID":"4856f749-b866-41cf-bdc2-0a5c8b2fce43","Type":"ContainerDied","Data":"9fbd351d508ad987a027338bbe3df60fba558a322d9e4bb2dc45e078b618a041"} Jun 06 09:44:20 crc kubenswrapper[4911]: I0606 09:44:20.108841 4911 scope.go:117] "RemoveContainer" containerID="90455f720a73953e46daa20e61e5cd68ecc7ddf301a441a4b34a908bfdd660ee" Jun 06 09:44:20 crc kubenswrapper[4911]: I0606 09:44:20.146638 4911 scope.go:117] "RemoveContainer" containerID="99f4b04fae1d8cb02541a0f900906221107c2423c0d4ace858cc013630231321" Jun 06 09:44:20 crc kubenswrapper[4911]: I0606 09:44:20.204569 4911 scope.go:117] "RemoveContainer" containerID="08ee614aae402d43cee536e54185e90f0702e570df1f08c863aa74b598aafdb9" Jun 06 09:44:21 crc kubenswrapper[4911]: I0606 09:44:21.495635 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:44:21 crc kubenswrapper[4911]: I0606 09:44:21.664047 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-ssh-key\") pod \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\" (UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " Jun 06 09:44:21 crc kubenswrapper[4911]: I0606 09:44:21.664151 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-inventory\") pod \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\" (UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " Jun 06 09:44:21 crc kubenswrapper[4911]: I0606 09:44:21.664448 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbtgp\" (UniqueName: \"kubernetes.io/projected/4856f749-b866-41cf-bdc2-0a5c8b2fce43-kube-api-access-qbtgp\") pod \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\" (UID: \"4856f749-b866-41cf-bdc2-0a5c8b2fce43\") " Jun 06 09:44:21 crc kubenswrapper[4911]: I0606 09:44:21.670562 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4856f749-b866-41cf-bdc2-0a5c8b2fce43-kube-api-access-qbtgp" (OuterVolumeSpecName: "kube-api-access-qbtgp") pod "4856f749-b866-41cf-bdc2-0a5c8b2fce43" (UID: "4856f749-b866-41cf-bdc2-0a5c8b2fce43"). InnerVolumeSpecName "kube-api-access-qbtgp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:44:21 crc kubenswrapper[4911]: I0606 09:44:21.698255 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-inventory" (OuterVolumeSpecName: "inventory") pod "4856f749-b866-41cf-bdc2-0a5c8b2fce43" (UID: "4856f749-b866-41cf-bdc2-0a5c8b2fce43"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:44:21 crc kubenswrapper[4911]: I0606 09:44:21.702184 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4856f749-b866-41cf-bdc2-0a5c8b2fce43" (UID: "4856f749-b866-41cf-bdc2-0a5c8b2fce43"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:44:21 crc kubenswrapper[4911]: I0606 09:44:21.766273 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbtgp\" (UniqueName: \"kubernetes.io/projected/4856f749-b866-41cf-bdc2-0a5c8b2fce43-kube-api-access-qbtgp\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:21 crc kubenswrapper[4911]: I0606 09:44:21.766362 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:21 crc kubenswrapper[4911]: I0606 09:44:21.766377 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4856f749-b866-41cf-bdc2-0a5c8b2fce43-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.047916 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" event={"ID":"4856f749-b866-41cf-bdc2-0a5c8b2fce43","Type":"ContainerDied","Data":"0e97ed37796da75d281d5cfa16d2ab056b087e7e1df3e5db8cd208570b9d07b2"} Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.047992 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e97ed37796da75d281d5cfa16d2ab056b087e7e1df3e5db8cd208570b9d07b2" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.047992 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z46kw" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.126950 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-d2w9b"] Jun 06 09:44:22 crc kubenswrapper[4911]: E0606 09:44:22.127637 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b0e0396-8693-4519-99cc-e14f027f59ba" containerName="container-00" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.127658 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b0e0396-8693-4519-99cc-e14f027f59ba" containerName="container-00" Jun 06 09:44:22 crc kubenswrapper[4911]: E0606 09:44:22.127682 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4856f749-b866-41cf-bdc2-0a5c8b2fce43" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.127692 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4856f749-b866-41cf-bdc2-0a5c8b2fce43" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.128003 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b0e0396-8693-4519-99cc-e14f027f59ba" containerName="container-00" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.128031 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4856f749-b866-41cf-bdc2-0a5c8b2fce43" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.128947 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.131229 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.131593 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.131819 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.132286 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.142006 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-d2w9b"] Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.281534 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-d2w9b\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.281758 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-d2w9b\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 
09:44:22.281816 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wzv5\" (UniqueName: \"kubernetes.io/projected/3942d3f6-c8b5-4092-8a22-8d3958955ab2-kube-api-access-5wzv5\") pod \"ssh-known-hosts-edpm-deployment-d2w9b\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.383555 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-d2w9b\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.383637 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wzv5\" (UniqueName: \"kubernetes.io/projected/3942d3f6-c8b5-4092-8a22-8d3958955ab2-kube-api-access-5wzv5\") pod \"ssh-known-hosts-edpm-deployment-d2w9b\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.383753 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-d2w9b\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.387787 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-d2w9b\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.388662 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-d2w9b\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.400576 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wzv5\" (UniqueName: \"kubernetes.io/projected/3942d3f6-c8b5-4092-8a22-8d3958955ab2-kube-api-access-5wzv5\") pod \"ssh-known-hosts-edpm-deployment-d2w9b\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.452295 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:22 crc kubenswrapper[4911]: I0606 09:44:22.962193 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-d2w9b"] Jun 06 09:44:23 crc kubenswrapper[4911]: I0606 09:44:23.060071 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" event={"ID":"3942d3f6-c8b5-4092-8a22-8d3958955ab2","Type":"ContainerStarted","Data":"c3cc5a631f0a5c606978e091cd61df709e840f9847409a72107c9ccc10a4b23a"} Jun 06 09:44:24 crc kubenswrapper[4911]: I0606 09:44:24.070762 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" event={"ID":"3942d3f6-c8b5-4092-8a22-8d3958955ab2","Type":"ContainerStarted","Data":"f4f26c18c44ddba92be6903a0b3b89138632b122fd02cc1d82b946dc88de3a56"} Jun 06 09:44:24 crc kubenswrapper[4911]: I0606 09:44:24.095982 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" podStartSLOduration=1.6334194069999999 podStartE2EDuration="2.095959308s" podCreationTimestamp="2025-06-06 09:44:22 +0000 UTC" firstStartedPulling="2025-06-06 09:44:22.965481194 +0000 UTC m=+1874.240906737" lastFinishedPulling="2025-06-06 09:44:23.428021095 +0000 UTC m=+1874.703446638" observedRunningTime="2025-06-06 09:44:24.092414137 +0000 UTC m=+1875.367839680" watchObservedRunningTime="2025-06-06 09:44:24.095959308 +0000 UTC m=+1875.371384851" Jun 06 09:44:31 crc kubenswrapper[4911]: I0606 09:44:31.136732 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" event={"ID":"3942d3f6-c8b5-4092-8a22-8d3958955ab2","Type":"ContainerDied","Data":"f4f26c18c44ddba92be6903a0b3b89138632b122fd02cc1d82b946dc88de3a56"} Jun 06 09:44:31 crc kubenswrapper[4911]: I0606 09:44:31.136626 4911 generic.go:334] "Generic (PLEG): container finished" podID="3942d3f6-c8b5-4092-8a22-8d3958955ab2" containerID="f4f26c18c44ddba92be6903a0b3b89138632b122fd02cc1d82b946dc88de3a56" exitCode=0 Jun 06 09:44:32 crc kubenswrapper[4911]: I0606 09:44:32.612038 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:32 crc kubenswrapper[4911]: I0606 09:44:32.707290 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-inventory-0\") pod \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " Jun 06 09:44:32 crc kubenswrapper[4911]: I0606 09:44:32.707553 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-ssh-key-openstack-edpm-ipam\") pod \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " Jun 06 09:44:32 crc kubenswrapper[4911]: I0606 09:44:32.707771 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wzv5\" (UniqueName: \"kubernetes.io/projected/3942d3f6-c8b5-4092-8a22-8d3958955ab2-kube-api-access-5wzv5\") pod \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\" (UID: \"3942d3f6-c8b5-4092-8a22-8d3958955ab2\") " Jun 06 09:44:32 crc kubenswrapper[4911]: I0606 09:44:32.715896 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3942d3f6-c8b5-4092-8a22-8d3958955ab2-kube-api-access-5wzv5" (OuterVolumeSpecName: "kube-api-access-5wzv5") pod "3942d3f6-c8b5-4092-8a22-8d3958955ab2" (UID: "3942d3f6-c8b5-4092-8a22-8d3958955ab2"). InnerVolumeSpecName "kube-api-access-5wzv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:44:32 crc kubenswrapper[4911]: I0606 09:44:32.741352 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "3942d3f6-c8b5-4092-8a22-8d3958955ab2" (UID: "3942d3f6-c8b5-4092-8a22-8d3958955ab2"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:44:32 crc kubenswrapper[4911]: I0606 09:44:32.743546 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "3942d3f6-c8b5-4092-8a22-8d3958955ab2" (UID: "3942d3f6-c8b5-4092-8a22-8d3958955ab2"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:44:32 crc kubenswrapper[4911]: I0606 09:44:32.811345 4911 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-inventory-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:32 crc kubenswrapper[4911]: I0606 09:44:32.811393 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3942d3f6-c8b5-4092-8a22-8d3958955ab2-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:32 crc kubenswrapper[4911]: I0606 09:44:32.811407 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wzv5\" (UniqueName: \"kubernetes.io/projected/3942d3f6-c8b5-4092-8a22-8d3958955ab2-kube-api-access-5wzv5\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.155562 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" event={"ID":"3942d3f6-c8b5-4092-8a22-8d3958955ab2","Type":"ContainerDied","Data":"c3cc5a631f0a5c606978e091cd61df709e840f9847409a72107c9ccc10a4b23a"} Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.155881 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3cc5a631f0a5c606978e091cd61df709e840f9847409a72107c9ccc10a4b23a" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.155669 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d2w9b" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.252577 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq"] Jun 06 09:44:33 crc kubenswrapper[4911]: E0606 09:44:33.253270 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3942d3f6-c8b5-4092-8a22-8d3958955ab2" containerName="ssh-known-hosts-edpm-deployment" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.253346 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3942d3f6-c8b5-4092-8a22-8d3958955ab2" containerName="ssh-known-hosts-edpm-deployment" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.253577 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3942d3f6-c8b5-4092-8a22-8d3958955ab2" containerName="ssh-known-hosts-edpm-deployment" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.254526 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.261480 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.261762 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.262254 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.262371 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.271762 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq"] Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.321340 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ptmb\" (UniqueName: \"kubernetes.io/projected/4724e2d7-9618-4720-ba4c-31204929dbb4-kube-api-access-9ptmb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gbljq\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.321450 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gbljq\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.321490 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gbljq\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.423508 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gbljq\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.423730 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ptmb\" (UniqueName: \"kubernetes.io/projected/4724e2d7-9618-4720-ba4c-31204929dbb4-kube-api-access-9ptmb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gbljq\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.423826 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gbljq\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.427432 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gbljq\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.427747 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gbljq\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.442086 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ptmb\" (UniqueName: \"kubernetes.io/projected/4724e2d7-9618-4720-ba4c-31204929dbb4-kube-api-access-9ptmb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gbljq\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:33 crc kubenswrapper[4911]: I0606 09:44:33.573937 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:34 crc kubenswrapper[4911]: I0606 09:44:34.109506 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq"] Jun 06 09:44:34 crc kubenswrapper[4911]: I0606 09:44:34.114968 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 09:44:34 crc kubenswrapper[4911]: I0606 09:44:34.167749 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" event={"ID":"4724e2d7-9618-4720-ba4c-31204929dbb4","Type":"ContainerStarted","Data":"6c5b2e91c9c8bc2b42fddbe77b6633d0113f3b3e86de852c6ef9028adc090d98"} Jun 06 09:44:35 crc kubenswrapper[4911]: I0606 09:44:35.181489 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" event={"ID":"4724e2d7-9618-4720-ba4c-31204929dbb4","Type":"ContainerStarted","Data":"6fce0fd24b7442822138fe3f1d094bcf7a54f99880de107bf6ae9302163d3295"} Jun 06 09:44:35 crc kubenswrapper[4911]: I0606 09:44:35.204114 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" podStartSLOduration=1.6143876270000002 podStartE2EDuration="2.204079609s" podCreationTimestamp="2025-06-06 09:44:33 +0000 UTC" firstStartedPulling="2025-06-06 09:44:34.11467086 +0000 UTC m=+1885.390096403" lastFinishedPulling="2025-06-06 09:44:34.704362842 +0000 UTC m=+1885.979788385" observedRunningTime="2025-06-06 09:44:35.197522921 +0000 UTC m=+1886.472948464" watchObservedRunningTime="2025-06-06 09:44:35.204079609 +0000 UTC m=+1886.479505152" Jun 06 09:44:38 crc kubenswrapper[4911]: I0606 09:44:38.041104 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-jwx66"] Jun 06 09:44:38 crc kubenswrapper[4911]: I0606 09:44:38.050619 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-jwx66"] Jun 06 09:44:39 crc 
kubenswrapper[4911]: I0606 09:44:39.960747 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27435fe9-7ac4-4fbb-9137-dabee568caf7" path="/var/lib/kubelet/pods/27435fe9-7ac4-4fbb-9137-dabee568caf7/volumes" Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.519813 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sqqmg"] Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.524152 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.532862 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sqqmg"] Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.585681 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-catalog-content\") pod \"redhat-operators-sqqmg\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.585780 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5d2x6\" (UniqueName: \"kubernetes.io/projected/6f46e42b-8e65-4fcb-b27c-7968e10e9217-kube-api-access-5d2x6\") pod \"redhat-operators-sqqmg\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.585902 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-utilities\") pod \"redhat-operators-sqqmg\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.687514 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5d2x6\" (UniqueName: \"kubernetes.io/projected/6f46e42b-8e65-4fcb-b27c-7968e10e9217-kube-api-access-5d2x6\") pod \"redhat-operators-sqqmg\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.687635 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-utilities\") pod \"redhat-operators-sqqmg\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.687762 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-catalog-content\") pod \"redhat-operators-sqqmg\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.688254 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-utilities\") pod \"redhat-operators-sqqmg\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:45 crc 
kubenswrapper[4911]: I0606 09:44:45.688304 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-catalog-content\") pod \"redhat-operators-sqqmg\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.716480 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5d2x6\" (UniqueName: \"kubernetes.io/projected/6f46e42b-8e65-4fcb-b27c-7968e10e9217-kube-api-access-5d2x6\") pod \"redhat-operators-sqqmg\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:45 crc kubenswrapper[4911]: I0606 09:44:45.860662 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:46 crc kubenswrapper[4911]: I0606 09:44:46.381416 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sqqmg"] Jun 06 09:44:47 crc kubenswrapper[4911]: I0606 09:44:47.288872 4911 generic.go:334] "Generic (PLEG): container finished" podID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" containerID="d948157f000a6163f4572fee24750e67018e7ec2d8a4777828617d252cefee33" exitCode=0 Jun 06 09:44:47 crc kubenswrapper[4911]: I0606 09:44:47.288925 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sqqmg" event={"ID":"6f46e42b-8e65-4fcb-b27c-7968e10e9217","Type":"ContainerDied","Data":"d948157f000a6163f4572fee24750e67018e7ec2d8a4777828617d252cefee33"} Jun 06 09:44:47 crc kubenswrapper[4911]: I0606 09:44:47.289189 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sqqmg" event={"ID":"6f46e42b-8e65-4fcb-b27c-7968e10e9217","Type":"ContainerStarted","Data":"43e466937b55fdd20299c45fea2962a46393739b515c4bdf00a23ee227f8dfd5"} Jun 06 09:44:49 crc kubenswrapper[4911]: I0606 09:44:49.308777 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sqqmg" event={"ID":"6f46e42b-8e65-4fcb-b27c-7968e10e9217","Type":"ContainerStarted","Data":"c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee"} Jun 06 09:44:50 crc kubenswrapper[4911]: I0606 09:44:50.329175 4911 generic.go:334] "Generic (PLEG): container finished" podID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" containerID="c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee" exitCode=0 Jun 06 09:44:50 crc kubenswrapper[4911]: I0606 09:44:50.329743 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sqqmg" event={"ID":"6f46e42b-8e65-4fcb-b27c-7968e10e9217","Type":"ContainerDied","Data":"c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee"} Jun 06 09:44:51 crc kubenswrapper[4911]: I0606 09:44:51.350355 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sqqmg" event={"ID":"6f46e42b-8e65-4fcb-b27c-7968e10e9217","Type":"ContainerStarted","Data":"640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5"} Jun 06 09:44:51 crc kubenswrapper[4911]: I0606 09:44:51.376799 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sqqmg" podStartSLOduration=2.6285377580000002 podStartE2EDuration="6.3767796s" podCreationTimestamp="2025-06-06 09:44:45 +0000 UTC" 
firstStartedPulling="2025-06-06 09:44:47.290966966 +0000 UTC m=+1898.566392509" lastFinishedPulling="2025-06-06 09:44:51.039208818 +0000 UTC m=+1902.314634351" observedRunningTime="2025-06-06 09:44:51.365767822 +0000 UTC m=+1902.641193375" watchObservedRunningTime="2025-06-06 09:44:51.3767796 +0000 UTC m=+1902.652205143" Jun 06 09:44:55 crc kubenswrapper[4911]: I0606 09:44:55.860952 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:55 crc kubenswrapper[4911]: I0606 09:44:55.861341 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:55 crc kubenswrapper[4911]: I0606 09:44:55.918085 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:56 crc kubenswrapper[4911]: I0606 09:44:56.467216 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:56 crc kubenswrapper[4911]: I0606 09:44:56.520766 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sqqmg"] Jun 06 09:44:57 crc kubenswrapper[4911]: I0606 09:44:57.413947 4911 generic.go:334] "Generic (PLEG): container finished" podID="4724e2d7-9618-4720-ba4c-31204929dbb4" containerID="6fce0fd24b7442822138fe3f1d094bcf7a54f99880de107bf6ae9302163d3295" exitCode=0 Jun 06 09:44:57 crc kubenswrapper[4911]: I0606 09:44:57.414051 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" event={"ID":"4724e2d7-9618-4720-ba4c-31204929dbb4","Type":"ContainerDied","Data":"6fce0fd24b7442822138fe3f1d094bcf7a54f99880de107bf6ae9302163d3295"} Jun 06 09:44:58 crc kubenswrapper[4911]: I0606 09:44:58.423402 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sqqmg" podUID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" containerName="registry-server" containerID="cri-o://640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5" gracePeriod=2 Jun 06 09:44:58 crc kubenswrapper[4911]: I0606 09:44:58.987932 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:58 crc kubenswrapper[4911]: I0606 09:44:58.999720 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.090864 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ptmb\" (UniqueName: \"kubernetes.io/projected/4724e2d7-9618-4720-ba4c-31204929dbb4-kube-api-access-9ptmb\") pod \"4724e2d7-9618-4720-ba4c-31204929dbb4\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.091039 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-inventory\") pod \"4724e2d7-9618-4720-ba4c-31204929dbb4\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.091130 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-ssh-key\") pod \"4724e2d7-9618-4720-ba4c-31204929dbb4\" (UID: \"4724e2d7-9618-4720-ba4c-31204929dbb4\") " Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.096620 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4724e2d7-9618-4720-ba4c-31204929dbb4-kube-api-access-9ptmb" (OuterVolumeSpecName: "kube-api-access-9ptmb") pod "4724e2d7-9618-4720-ba4c-31204929dbb4" (UID: "4724e2d7-9618-4720-ba4c-31204929dbb4"). InnerVolumeSpecName "kube-api-access-9ptmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.118260 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4724e2d7-9618-4720-ba4c-31204929dbb4" (UID: "4724e2d7-9618-4720-ba4c-31204929dbb4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.124476 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-inventory" (OuterVolumeSpecName: "inventory") pod "4724e2d7-9618-4720-ba4c-31204929dbb4" (UID: "4724e2d7-9618-4720-ba4c-31204929dbb4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.193355 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-utilities\") pod \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.193463 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-catalog-content\") pod \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.193596 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5d2x6\" (UniqueName: \"kubernetes.io/projected/6f46e42b-8e65-4fcb-b27c-7968e10e9217-kube-api-access-5d2x6\") pod \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\" (UID: \"6f46e42b-8e65-4fcb-b27c-7968e10e9217\") " Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.194196 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ptmb\" (UniqueName: \"kubernetes.io/projected/4724e2d7-9618-4720-ba4c-31204929dbb4-kube-api-access-9ptmb\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.194220 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.194230 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4724e2d7-9618-4720-ba4c-31204929dbb4-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.194386 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-utilities" (OuterVolumeSpecName: "utilities") pod "6f46e42b-8e65-4fcb-b27c-7968e10e9217" (UID: "6f46e42b-8e65-4fcb-b27c-7968e10e9217"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.197996 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f46e42b-8e65-4fcb-b27c-7968e10e9217-kube-api-access-5d2x6" (OuterVolumeSpecName: "kube-api-access-5d2x6") pod "6f46e42b-8e65-4fcb-b27c-7968e10e9217" (UID: "6f46e42b-8e65-4fcb-b27c-7968e10e9217"). InnerVolumeSpecName "kube-api-access-5d2x6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.274337 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f46e42b-8e65-4fcb-b27c-7968e10e9217" (UID: "6f46e42b-8e65-4fcb-b27c-7968e10e9217"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.296627 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.296666 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f46e42b-8e65-4fcb-b27c-7968e10e9217-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.296681 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5d2x6\" (UniqueName: \"kubernetes.io/projected/6f46e42b-8e65-4fcb-b27c-7968e10e9217-kube-api-access-5d2x6\") on node \"crc\" DevicePath \"\"" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.433462 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" event={"ID":"4724e2d7-9618-4720-ba4c-31204929dbb4","Type":"ContainerDied","Data":"6c5b2e91c9c8bc2b42fddbe77b6633d0113f3b3e86de852c6ef9028adc090d98"} Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.433490 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gbljq" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.433500 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c5b2e91c9c8bc2b42fddbe77b6633d0113f3b3e86de852c6ef9028adc090d98" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.437246 4911 generic.go:334] "Generic (PLEG): container finished" podID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" containerID="640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5" exitCode=0 Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.437285 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sqqmg" event={"ID":"6f46e42b-8e65-4fcb-b27c-7968e10e9217","Type":"ContainerDied","Data":"640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5"} Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.437321 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sqqmg" event={"ID":"6f46e42b-8e65-4fcb-b27c-7968e10e9217","Type":"ContainerDied","Data":"43e466937b55fdd20299c45fea2962a46393739b515c4bdf00a23ee227f8dfd5"} Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.437345 4911 scope.go:117] "RemoveContainer" containerID="640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.437359 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sqqmg" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.483841 4911 scope.go:117] "RemoveContainer" containerID="c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.490154 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sqqmg"] Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.499346 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sqqmg"] Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.523183 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q"] Jun 06 09:44:59 crc kubenswrapper[4911]: E0606 09:44:59.524317 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4724e2d7-9618-4720-ba4c-31204929dbb4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.524354 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4724e2d7-9618-4720-ba4c-31204929dbb4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:44:59 crc kubenswrapper[4911]: E0606 09:44:59.524403 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" containerName="extract-utilities" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.524415 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" containerName="extract-utilities" Jun 06 09:44:59 crc kubenswrapper[4911]: E0606 09:44:59.524455 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" containerName="registry-server" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.524464 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" containerName="registry-server" Jun 06 09:44:59 crc kubenswrapper[4911]: E0606 09:44:59.524487 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" containerName="extract-content" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.524496 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" containerName="extract-content" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.525174 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4724e2d7-9618-4720-ba4c-31204929dbb4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.525226 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" containerName="registry-server" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.526463 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.529744 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.530671 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.531058 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.531643 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.537354 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q"] Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.540424 4911 scope.go:117] "RemoveContainer" containerID="d948157f000a6163f4572fee24750e67018e7ec2d8a4777828617d252cefee33" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.603702 4911 scope.go:117] "RemoveContainer" containerID="640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5" Jun 06 09:44:59 crc kubenswrapper[4911]: E0606 09:44:59.604413 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5\": container with ID starting with 640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5 not found: ID does not exist" containerID="640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.604441 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5"} err="failed to get container status \"640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5\": rpc error: code = NotFound desc = could not find container \"640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5\": container with ID starting with 640661486bb125dd17d1a587bb3f33f3adbbecefea1320225b79ae690f1b3bb5 not found: ID does not exist" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.604462 4911 scope.go:117] "RemoveContainer" containerID="c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee" Jun 06 09:44:59 crc kubenswrapper[4911]: E0606 09:44:59.606732 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee\": container with ID starting with c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee not found: ID does not exist" containerID="c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.606795 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee"} err="failed to get container status \"c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee\": rpc error: code = NotFound desc = could not find container \"c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee\": container with ID starting 
with c1f4ff20be6645ae8553b53d193fb3227a5fda1bf9bb08110648ba60d1ee8aee not found: ID does not exist" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.606830 4911 scope.go:117] "RemoveContainer" containerID="d948157f000a6163f4572fee24750e67018e7ec2d8a4777828617d252cefee33" Jun 06 09:44:59 crc kubenswrapper[4911]: E0606 09:44:59.607307 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d948157f000a6163f4572fee24750e67018e7ec2d8a4777828617d252cefee33\": container with ID starting with d948157f000a6163f4572fee24750e67018e7ec2d8a4777828617d252cefee33 not found: ID does not exist" containerID="d948157f000a6163f4572fee24750e67018e7ec2d8a4777828617d252cefee33" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.607379 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d948157f000a6163f4572fee24750e67018e7ec2d8a4777828617d252cefee33"} err="failed to get container status \"d948157f000a6163f4572fee24750e67018e7ec2d8a4777828617d252cefee33\": rpc error: code = NotFound desc = could not find container \"d948157f000a6163f4572fee24750e67018e7ec2d8a4777828617d252cefee33\": container with ID starting with d948157f000a6163f4572fee24750e67018e7ec2d8a4777828617d252cefee33 not found: ID does not exist" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.623489 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.624074 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9p29g\" (UniqueName: \"kubernetes.io/projected/ae48d921-6c61-4025-b5fb-1d23ffb85636-kube-api-access-9p29g\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.624192 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.726610 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.726745 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9p29g\" (UniqueName: \"kubernetes.io/projected/ae48d921-6c61-4025-b5fb-1d23ffb85636-kube-api-access-9p29g\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 
06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.726780 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.735624 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.735648 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.748841 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9p29g\" (UniqueName: \"kubernetes.io/projected/ae48d921-6c61-4025-b5fb-1d23ffb85636-kube-api-access-9p29g\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.949338 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:44:59 crc kubenswrapper[4911]: I0606 09:44:59.961886 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f46e42b-8e65-4fcb-b27c-7968e10e9217" path="/var/lib/kubelet/pods/6f46e42b-8e65-4fcb-b27c-7968e10e9217/volumes" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.141545 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6"] Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.148336 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.150648 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.150997 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.152302 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6"] Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.239631 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/290d083a-2027-4f99-b6ab-6432beb132f6-config-volume\") pod \"collect-profiles-29153385-9gbp6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.240714 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/290d083a-2027-4f99-b6ab-6432beb132f6-secret-volume\") pod \"collect-profiles-29153385-9gbp6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.241071 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp5rz\" (UniqueName: \"kubernetes.io/projected/290d083a-2027-4f99-b6ab-6432beb132f6-kube-api-access-zp5rz\") pod \"collect-profiles-29153385-9gbp6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.344678 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/290d083a-2027-4f99-b6ab-6432beb132f6-secret-volume\") pod \"collect-profiles-29153385-9gbp6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.344905 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp5rz\" (UniqueName: \"kubernetes.io/projected/290d083a-2027-4f99-b6ab-6432beb132f6-kube-api-access-zp5rz\") pod \"collect-profiles-29153385-9gbp6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.344959 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/290d083a-2027-4f99-b6ab-6432beb132f6-config-volume\") pod \"collect-profiles-29153385-9gbp6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.346459 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/290d083a-2027-4f99-b6ab-6432beb132f6-config-volume\") pod 
\"collect-profiles-29153385-9gbp6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.349528 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/290d083a-2027-4f99-b6ab-6432beb132f6-secret-volume\") pod \"collect-profiles-29153385-9gbp6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.364915 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp5rz\" (UniqueName: \"kubernetes.io/projected/290d083a-2027-4f99-b6ab-6432beb132f6-kube-api-access-zp5rz\") pod \"collect-profiles-29153385-9gbp6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.489545 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.497641 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q"] Jun 06 09:45:00 crc kubenswrapper[4911]: W0606 09:45:00.512520 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae48d921_6c61_4025_b5fb_1d23ffb85636.slice/crio-3baaccc4467354406863161a49af6d62c82ea33fe3d1b7491a5c78721eafab06 WatchSource:0}: Error finding container 3baaccc4467354406863161a49af6d62c82ea33fe3d1b7491a5c78721eafab06: Status 404 returned error can't find the container with id 3baaccc4467354406863161a49af6d62c82ea33fe3d1b7491a5c78721eafab06 Jun 06 09:45:00 crc kubenswrapper[4911]: I0606 09:45:00.957789 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6"] Jun 06 09:45:00 crc kubenswrapper[4911]: W0606 09:45:00.960902 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod290d083a_2027_4f99_b6ab_6432beb132f6.slice/crio-aa5bf43a02cecc422aecf554d3d7d41545fd3d60db69bcc0afc90ba0bbb00fc0 WatchSource:0}: Error finding container aa5bf43a02cecc422aecf554d3d7d41545fd3d60db69bcc0afc90ba0bbb00fc0: Status 404 returned error can't find the container with id aa5bf43a02cecc422aecf554d3d7d41545fd3d60db69bcc0afc90ba0bbb00fc0 Jun 06 09:45:01 crc kubenswrapper[4911]: I0606 09:45:01.461660 4911 generic.go:334] "Generic (PLEG): container finished" podID="290d083a-2027-4f99-b6ab-6432beb132f6" containerID="afac12540d29e80df37169b6793b23c27ae049cec25944b35fe2e70bbbfb985f" exitCode=0 Jun 06 09:45:01 crc kubenswrapper[4911]: I0606 09:45:01.461740 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" event={"ID":"290d083a-2027-4f99-b6ab-6432beb132f6","Type":"ContainerDied","Data":"afac12540d29e80df37169b6793b23c27ae049cec25944b35fe2e70bbbfb985f"} Jun 06 09:45:01 crc kubenswrapper[4911]: I0606 09:45:01.461772 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" 
event={"ID":"290d083a-2027-4f99-b6ab-6432beb132f6","Type":"ContainerStarted","Data":"aa5bf43a02cecc422aecf554d3d7d41545fd3d60db69bcc0afc90ba0bbb00fc0"} Jun 06 09:45:01 crc kubenswrapper[4911]: I0606 09:45:01.464227 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" event={"ID":"ae48d921-6c61-4025-b5fb-1d23ffb85636","Type":"ContainerStarted","Data":"780d5a875b20273f3f49b39af636ef479ada7bcc051237e1da46c492504925e0"} Jun 06 09:45:01 crc kubenswrapper[4911]: I0606 09:45:01.465408 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" event={"ID":"ae48d921-6c61-4025-b5fb-1d23ffb85636","Type":"ContainerStarted","Data":"3baaccc4467354406863161a49af6d62c82ea33fe3d1b7491a5c78721eafab06"} Jun 06 09:45:01 crc kubenswrapper[4911]: I0606 09:45:01.499584 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" podStartSLOduration=2.033258423 podStartE2EDuration="2.499567717s" podCreationTimestamp="2025-06-06 09:44:59 +0000 UTC" firstStartedPulling="2025-06-06 09:45:00.516859864 +0000 UTC m=+1911.792285407" lastFinishedPulling="2025-06-06 09:45:00.983169158 +0000 UTC m=+1912.258594701" observedRunningTime="2025-06-06 09:45:01.495738093 +0000 UTC m=+1912.771163646" watchObservedRunningTime="2025-06-06 09:45:01.499567717 +0000 UTC m=+1912.774993260" Jun 06 09:45:02 crc kubenswrapper[4911]: I0606 09:45:02.170856 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-4g269"] Jun 06 09:45:02 crc kubenswrapper[4911]: I0606 09:45:02.172695 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-4g269" Jun 06 09:45:02 crc kubenswrapper[4911]: I0606 09:45:02.290952 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ebd372b9-d9f7-454b-a23e-61d9349efe3c-host\") pod \"crc-debug-4g269\" (UID: \"ebd372b9-d9f7-454b-a23e-61d9349efe3c\") " pod="openstack/crc-debug-4g269" Jun 06 09:45:02 crc kubenswrapper[4911]: I0606 09:45:02.291033 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld72t\" (UniqueName: \"kubernetes.io/projected/ebd372b9-d9f7-454b-a23e-61d9349efe3c-kube-api-access-ld72t\") pod \"crc-debug-4g269\" (UID: \"ebd372b9-d9f7-454b-a23e-61d9349efe3c\") " pod="openstack/crc-debug-4g269" Jun 06 09:45:02 crc kubenswrapper[4911]: I0606 09:45:02.392627 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ebd372b9-d9f7-454b-a23e-61d9349efe3c-host\") pod \"crc-debug-4g269\" (UID: \"ebd372b9-d9f7-454b-a23e-61d9349efe3c\") " pod="openstack/crc-debug-4g269" Jun 06 09:45:02 crc kubenswrapper[4911]: I0606 09:45:02.392723 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld72t\" (UniqueName: \"kubernetes.io/projected/ebd372b9-d9f7-454b-a23e-61d9349efe3c-kube-api-access-ld72t\") pod \"crc-debug-4g269\" (UID: \"ebd372b9-d9f7-454b-a23e-61d9349efe3c\") " pod="openstack/crc-debug-4g269" Jun 06 09:45:02 crc kubenswrapper[4911]: I0606 09:45:02.392922 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ebd372b9-d9f7-454b-a23e-61d9349efe3c-host\") pod \"crc-debug-4g269\" (UID: 
\"ebd372b9-d9f7-454b-a23e-61d9349efe3c\") " pod="openstack/crc-debug-4g269" Jun 06 09:45:02 crc kubenswrapper[4911]: I0606 09:45:02.418881 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld72t\" (UniqueName: \"kubernetes.io/projected/ebd372b9-d9f7-454b-a23e-61d9349efe3c-kube-api-access-ld72t\") pod \"crc-debug-4g269\" (UID: \"ebd372b9-d9f7-454b-a23e-61d9349efe3c\") " pod="openstack/crc-debug-4g269" Jun 06 09:45:02 crc kubenswrapper[4911]: I0606 09:45:02.491899 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-4g269" Jun 06 09:45:02 crc kubenswrapper[4911]: W0606 09:45:02.534463 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podebd372b9_d9f7_454b_a23e_61d9349efe3c.slice/crio-e602c6ee33ef5a74fa8fdf6f877578ff338df766937cd8956e9e08ab035acb7e WatchSource:0}: Error finding container e602c6ee33ef5a74fa8fdf6f877578ff338df766937cd8956e9e08ab035acb7e: Status 404 returned error can't find the container with id e602c6ee33ef5a74fa8fdf6f877578ff338df766937cd8956e9e08ab035acb7e Jun 06 09:45:02 crc kubenswrapper[4911]: I0606 09:45:02.966352 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.006241 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/290d083a-2027-4f99-b6ab-6432beb132f6-config-volume\") pod \"290d083a-2027-4f99-b6ab-6432beb132f6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.006522 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zp5rz\" (UniqueName: \"kubernetes.io/projected/290d083a-2027-4f99-b6ab-6432beb132f6-kube-api-access-zp5rz\") pod \"290d083a-2027-4f99-b6ab-6432beb132f6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.006669 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/290d083a-2027-4f99-b6ab-6432beb132f6-secret-volume\") pod \"290d083a-2027-4f99-b6ab-6432beb132f6\" (UID: \"290d083a-2027-4f99-b6ab-6432beb132f6\") " Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.008458 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/290d083a-2027-4f99-b6ab-6432beb132f6-config-volume" (OuterVolumeSpecName: "config-volume") pod "290d083a-2027-4f99-b6ab-6432beb132f6" (UID: "290d083a-2027-4f99-b6ab-6432beb132f6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.016541 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/290d083a-2027-4f99-b6ab-6432beb132f6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "290d083a-2027-4f99-b6ab-6432beb132f6" (UID: "290d083a-2027-4f99-b6ab-6432beb132f6"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.016707 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/290d083a-2027-4f99-b6ab-6432beb132f6-kube-api-access-zp5rz" (OuterVolumeSpecName: "kube-api-access-zp5rz") pod "290d083a-2027-4f99-b6ab-6432beb132f6" (UID: "290d083a-2027-4f99-b6ab-6432beb132f6"). InnerVolumeSpecName "kube-api-access-zp5rz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.110216 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zp5rz\" (UniqueName: \"kubernetes.io/projected/290d083a-2027-4f99-b6ab-6432beb132f6-kube-api-access-zp5rz\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.110264 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/290d083a-2027-4f99-b6ab-6432beb132f6-secret-volume\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.110278 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/290d083a-2027-4f99-b6ab-6432beb132f6-config-volume\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.484359 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-4g269" event={"ID":"ebd372b9-d9f7-454b-a23e-61d9349efe3c","Type":"ContainerStarted","Data":"74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e"} Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.484419 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-4g269" event={"ID":"ebd372b9-d9f7-454b-a23e-61d9349efe3c","Type":"ContainerStarted","Data":"e602c6ee33ef5a74fa8fdf6f877578ff338df766937cd8956e9e08ab035acb7e"} Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.487794 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" event={"ID":"290d083a-2027-4f99-b6ab-6432beb132f6","Type":"ContainerDied","Data":"aa5bf43a02cecc422aecf554d3d7d41545fd3d60db69bcc0afc90ba0bbb00fc0"} Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.487853 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa5bf43a02cecc422aecf554d3d7d41545fd3d60db69bcc0afc90ba0bbb00fc0" Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.487903 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6" Jun 06 09:45:03 crc kubenswrapper[4911]: I0606 09:45:03.507442 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-4g269" podStartSLOduration=1.507417571 podStartE2EDuration="1.507417571s" podCreationTimestamp="2025-06-06 09:45:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:45:03.499741004 +0000 UTC m=+1914.775166547" watchObservedRunningTime="2025-06-06 09:45:03.507417571 +0000 UTC m=+1914.782843124" Jun 06 09:45:04 crc kubenswrapper[4911]: I0606 09:45:04.052885 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c"] Jun 06 09:45:04 crc kubenswrapper[4911]: I0606 09:45:04.070943 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153340-7mt4c"] Jun 06 09:45:05 crc kubenswrapper[4911]: I0606 09:45:05.960021 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c36e9c0e-d7bb-4307-8767-c34651aeb7a8" path="/var/lib/kubelet/pods/c36e9c0e-d7bb-4307-8767-c34651aeb7a8/volumes" Jun 06 09:45:10 crc kubenswrapper[4911]: I0606 09:45:10.556830 4911 generic.go:334] "Generic (PLEG): container finished" podID="ae48d921-6c61-4025-b5fb-1d23ffb85636" containerID="780d5a875b20273f3f49b39af636ef479ada7bcc051237e1da46c492504925e0" exitCode=0 Jun 06 09:45:10 crc kubenswrapper[4911]: I0606 09:45:10.556978 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" event={"ID":"ae48d921-6c61-4025-b5fb-1d23ffb85636","Type":"ContainerDied","Data":"780d5a875b20273f3f49b39af636ef479ada7bcc051237e1da46c492504925e0"} Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.071034 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.187981 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-inventory\") pod \"ae48d921-6c61-4025-b5fb-1d23ffb85636\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.188168 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-ssh-key\") pod \"ae48d921-6c61-4025-b5fb-1d23ffb85636\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.188461 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9p29g\" (UniqueName: \"kubernetes.io/projected/ae48d921-6c61-4025-b5fb-1d23ffb85636-kube-api-access-9p29g\") pod \"ae48d921-6c61-4025-b5fb-1d23ffb85636\" (UID: \"ae48d921-6c61-4025-b5fb-1d23ffb85636\") " Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.194324 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae48d921-6c61-4025-b5fb-1d23ffb85636-kube-api-access-9p29g" (OuterVolumeSpecName: "kube-api-access-9p29g") pod "ae48d921-6c61-4025-b5fb-1d23ffb85636" (UID: "ae48d921-6c61-4025-b5fb-1d23ffb85636"). InnerVolumeSpecName "kube-api-access-9p29g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.220679 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ae48d921-6c61-4025-b5fb-1d23ffb85636" (UID: "ae48d921-6c61-4025-b5fb-1d23ffb85636"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.223016 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-inventory" (OuterVolumeSpecName: "inventory") pod "ae48d921-6c61-4025-b5fb-1d23ffb85636" (UID: "ae48d921-6c61-4025-b5fb-1d23ffb85636"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.292303 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.292353 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae48d921-6c61-4025-b5fb-1d23ffb85636-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.292368 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9p29g\" (UniqueName: \"kubernetes.io/projected/ae48d921-6c61-4025-b5fb-1d23ffb85636-kube-api-access-9p29g\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.579729 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" event={"ID":"ae48d921-6c61-4025-b5fb-1d23ffb85636","Type":"ContainerDied","Data":"3baaccc4467354406863161a49af6d62c82ea33fe3d1b7491a5c78721eafab06"} Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.580048 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3baaccc4467354406863161a49af6d62c82ea33fe3d1b7491a5c78721eafab06" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.579839 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.660631 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq"] Jun 06 09:45:12 crc kubenswrapper[4911]: E0606 09:45:12.661252 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae48d921-6c61-4025-b5fb-1d23ffb85636" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.661335 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae48d921-6c61-4025-b5fb-1d23ffb85636" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:45:12 crc kubenswrapper[4911]: E0606 09:45:12.661401 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="290d083a-2027-4f99-b6ab-6432beb132f6" containerName="collect-profiles" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.661485 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="290d083a-2027-4f99-b6ab-6432beb132f6" containerName="collect-profiles" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.661776 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="290d083a-2027-4f99-b6ab-6432beb132f6" containerName="collect-profiles" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.661849 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae48d921-6c61-4025-b5fb-1d23ffb85636" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.662602 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.665835 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.666179 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.666418 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.666670 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.666857 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.668440 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.669187 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.669382 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.677936 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq"] Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.702616 4911 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.702886 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.703003 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.703109 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.703219 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82prc\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-kube-api-access-82prc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.703375 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.703467 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.703573 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.703623 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.703699 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.704010 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.704069 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.704127 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.704231 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.805968 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806056 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806084 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806130 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806208 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806264 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806289 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806312 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: 
I0606 09:45:12.806344 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806385 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82prc\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-kube-api-access-82prc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806433 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806464 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806497 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.806530 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.816571 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.817733 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.819406 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.819468 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.820011 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.823975 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.825667 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.832706 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.832781 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc 
kubenswrapper[4911]: I0606 09:45:12.832939 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.835977 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.836603 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.837246 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.864316 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82prc\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-kube-api-access-82prc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hknjq\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:12 crc kubenswrapper[4911]: I0606 09:45:12.984501 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.199863 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-4g269"] Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.200518 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-4g269" podUID="ebd372b9-d9f7-454b-a23e-61d9349efe3c" containerName="container-00" containerID="cri-o://74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e" gracePeriod=2 Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.206295 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-4g269"] Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.301679 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-4g269" Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.316156 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ld72t\" (UniqueName: \"kubernetes.io/projected/ebd372b9-d9f7-454b-a23e-61d9349efe3c-kube-api-access-ld72t\") pod \"ebd372b9-d9f7-454b-a23e-61d9349efe3c\" (UID: \"ebd372b9-d9f7-454b-a23e-61d9349efe3c\") " Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.316450 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ebd372b9-d9f7-454b-a23e-61d9349efe3c-host\") pod \"ebd372b9-d9f7-454b-a23e-61d9349efe3c\" (UID: \"ebd372b9-d9f7-454b-a23e-61d9349efe3c\") " Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.316540 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebd372b9-d9f7-454b-a23e-61d9349efe3c-host" (OuterVolumeSpecName: "host") pod "ebd372b9-d9f7-454b-a23e-61d9349efe3c" (UID: "ebd372b9-d9f7-454b-a23e-61d9349efe3c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.316958 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ebd372b9-d9f7-454b-a23e-61d9349efe3c-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.321261 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebd372b9-d9f7-454b-a23e-61d9349efe3c-kube-api-access-ld72t" (OuterVolumeSpecName: "kube-api-access-ld72t") pod "ebd372b9-d9f7-454b-a23e-61d9349efe3c" (UID: "ebd372b9-d9f7-454b-a23e-61d9349efe3c"). InnerVolumeSpecName "kube-api-access-ld72t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.419816 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ld72t\" (UniqueName: \"kubernetes.io/projected/ebd372b9-d9f7-454b-a23e-61d9349efe3c-kube-api-access-ld72t\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.502645 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq"] Jun 06 09:45:13 crc kubenswrapper[4911]: W0606 09:45:13.503698 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffaebe67_a4d5_46d4_8bdc_d8bdddd58ff3.slice/crio-6a5da67c7a069c95ebbc25bf27727e2636fa0ecc94d868f3a93917aa6b77bf3e WatchSource:0}: Error finding container 6a5da67c7a069c95ebbc25bf27727e2636fa0ecc94d868f3a93917aa6b77bf3e: Status 404 returned error can't find the container with id 6a5da67c7a069c95ebbc25bf27727e2636fa0ecc94d868f3a93917aa6b77bf3e Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.592345 4911 generic.go:334] "Generic (PLEG): container finished" podID="ebd372b9-d9f7-454b-a23e-61d9349efe3c" containerID="74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e" exitCode=0 Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.592436 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-4g269" Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.592453 4911 scope.go:117] "RemoveContainer" containerID="74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e" Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.595553 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" event={"ID":"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3","Type":"ContainerStarted","Data":"6a5da67c7a069c95ebbc25bf27727e2636fa0ecc94d868f3a93917aa6b77bf3e"} Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.618234 4911 scope.go:117] "RemoveContainer" containerID="74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e" Jun 06 09:45:13 crc kubenswrapper[4911]: E0606 09:45:13.618745 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e\": container with ID starting with 74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e not found: ID does not exist" containerID="74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e" Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.618788 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e"} err="failed to get container status \"74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e\": rpc error: code = NotFound desc = could not find container \"74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e\": container with ID starting with 74fdb24ccae2529fb6c7ea2e82880f47b85c50f2dd535b3316bc795b52e2bb3e not found: ID does not exist" Jun 06 09:45:13 crc kubenswrapper[4911]: I0606 09:45:13.962201 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebd372b9-d9f7-454b-a23e-61d9349efe3c" path="/var/lib/kubelet/pods/ebd372b9-d9f7-454b-a23e-61d9349efe3c/volumes" Jun 06 09:45:14 crc kubenswrapper[4911]: I0606 09:45:14.608580 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" event={"ID":"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3","Type":"ContainerStarted","Data":"b4c16b9d8cbee7e925375dca80da462b66c6e50413e234423dee6efde5e0a824"} Jun 06 09:45:14 crc kubenswrapper[4911]: I0606 09:45:14.630878 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" podStartSLOduration=2.019561179 podStartE2EDuration="2.630856745s" podCreationTimestamp="2025-06-06 09:45:12 +0000 UTC" firstStartedPulling="2025-06-06 09:45:13.506348308 +0000 UTC m=+1924.781773851" lastFinishedPulling="2025-06-06 09:45:14.117643864 +0000 UTC m=+1925.393069417" observedRunningTime="2025-06-06 09:45:14.627014681 +0000 UTC m=+1925.902440224" watchObservedRunningTime="2025-06-06 09:45:14.630856745 +0000 UTC m=+1925.906282298" Jun 06 09:45:20 crc kubenswrapper[4911]: I0606 09:45:20.320556 4911 scope.go:117] "RemoveContainer" containerID="0f31b44c501e518dd703c0f9b16c9a1528de5ab578f7213db722bf3043893154" Jun 06 09:45:20 crc kubenswrapper[4911]: I0606 09:45:20.373379 4911 scope.go:117] "RemoveContainer" containerID="9c4a8bf775467270a658e0d090270788dd8d491291cea4b29ce48b234c2661cb" Jun 06 09:45:46 crc kubenswrapper[4911]: I0606 09:45:46.914427 4911 generic.go:334] "Generic (PLEG): container finished" 
podID="ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" containerID="b4c16b9d8cbee7e925375dca80da462b66c6e50413e234423dee6efde5e0a824" exitCode=0 Jun 06 09:45:46 crc kubenswrapper[4911]: I0606 09:45:46.914518 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" event={"ID":"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3","Type":"ContainerDied","Data":"b4c16b9d8cbee7e925375dca80da462b66c6e50413e234423dee6efde5e0a824"} Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.430657 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.556423 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-inventory\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.556574 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-bootstrap-combined-ca-bundle\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.556674 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-telemetry-combined-ca-bundle\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.556714 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ssh-key\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.556748 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-libvirt-combined-ca-bundle\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.556819 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ovn-combined-ca-bundle\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.556882 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.556906 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-repo-setup-combined-ca-bundle\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.556945 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-nova-combined-ca-bundle\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.556980 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.557036 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82prc\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-kube-api-access-82prc\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.557068 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-neutron-metadata-combined-ca-bundle\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.557117 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.557150 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\" (UID: \"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3\") " Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.563640 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.564048 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-kube-api-access-82prc" (OuterVolumeSpecName: "kube-api-access-82prc") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "kube-api-access-82prc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.564256 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.564317 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.566131 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.566223 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.569968 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.582067 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.582946 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.583271 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.583315 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.585687 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.596406 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-inventory" (OuterVolumeSpecName: "inventory") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.600449 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" (UID: "ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662839 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82prc\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-kube-api-access-82prc\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662882 4911 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662897 4911 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662910 4911 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662923 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662933 4911 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662941 4911 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662949 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662957 4911 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662967 4911 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662975 4911 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662984 4911 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.662994 4911 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.663003 4911 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.937986 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" event={"ID":"ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3","Type":"ContainerDied","Data":"6a5da67c7a069c95ebbc25bf27727e2636fa0ecc94d868f3a93917aa6b77bf3e"} Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.938328 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a5da67c7a069c95ebbc25bf27727e2636fa0ecc94d868f3a93917aa6b77bf3e" Jun 06 09:45:48 crc kubenswrapper[4911]: I0606 09:45:48.938016 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hknjq" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.024286 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q"] Jun 06 09:45:49 crc kubenswrapper[4911]: E0606 09:45:49.024824 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.024846 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jun 06 09:45:49 crc kubenswrapper[4911]: E0606 09:45:49.024868 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebd372b9-d9f7-454b-a23e-61d9349efe3c" containerName="container-00" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.024877 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebd372b9-d9f7-454b-a23e-61d9349efe3c" containerName="container-00" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.025142 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebd372b9-d9f7-454b-a23e-61d9349efe3c" containerName="container-00" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.025188 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.026230 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.032943 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.033214 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.033614 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.033725 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.033775 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.056915 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q"] Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.173537 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crch2\" (UniqueName: \"kubernetes.io/projected/2947ca03-3408-495a-961c-9d548088ebe4-kube-api-access-crch2\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.173733 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2947ca03-3408-495a-961c-9d548088ebe4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.173780 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.173813 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.173859 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.276128 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" 
(UniqueName: \"kubernetes.io/configmap/2947ca03-3408-495a-961c-9d548088ebe4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.276215 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.276261 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.276304 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.276390 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crch2\" (UniqueName: \"kubernetes.io/projected/2947ca03-3408-495a-961c-9d548088ebe4-kube-api-access-crch2\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.278221 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2947ca03-3408-495a-961c-9d548088ebe4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.281669 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.281745 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.294637 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crch2\" (UniqueName: \"kubernetes.io/projected/2947ca03-3408-495a-961c-9d548088ebe4-kube-api-access-crch2\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: 
\"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.302649 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qhv7q\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.356014 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.894673 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q"] Jun 06 09:45:49 crc kubenswrapper[4911]: I0606 09:45:49.962518 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" event={"ID":"2947ca03-3408-495a-961c-9d548088ebe4","Type":"ContainerStarted","Data":"76d30e03e7cecaea2f82a2725351fbf1ac6f231b43b867b2e997bbbb1cfa8791"} Jun 06 09:45:50 crc kubenswrapper[4911]: I0606 09:45:50.967367 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" event={"ID":"2947ca03-3408-495a-961c-9d548088ebe4","Type":"ContainerStarted","Data":"314f288b680127b9f08d4b2dc955be7ed8f93e55a3c44714457bc820af706eda"} Jun 06 09:45:50 crc kubenswrapper[4911]: I0606 09:45:50.987514 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" podStartSLOduration=1.5080308850000002 podStartE2EDuration="1.987490014s" podCreationTimestamp="2025-06-06 09:45:49 +0000 UTC" firstStartedPulling="2025-06-06 09:45:49.902542436 +0000 UTC m=+1961.177967979" lastFinishedPulling="2025-06-06 09:45:50.382001565 +0000 UTC m=+1961.657427108" observedRunningTime="2025-06-06 09:45:50.983675281 +0000 UTC m=+1962.259100834" watchObservedRunningTime="2025-06-06 09:45:50.987490014 +0000 UTC m=+1962.262915557" Jun 06 09:45:54 crc kubenswrapper[4911]: I0606 09:45:54.300437 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:45:54 crc kubenswrapper[4911]: I0606 09:45:54.301071 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.391806 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tbpjm"] Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.394914 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.413044 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tbpjm"] Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.524463 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-utilities\") pod \"redhat-marketplace-tbpjm\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.524516 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-catalog-content\") pod \"redhat-marketplace-tbpjm\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.524584 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dclng\" (UniqueName: \"kubernetes.io/projected/02a0ac65-f45f-40dd-a592-64a26253c821-kube-api-access-dclng\") pod \"redhat-marketplace-tbpjm\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.627637 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-utilities\") pod \"redhat-marketplace-tbpjm\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.627706 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-catalog-content\") pod \"redhat-marketplace-tbpjm\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.627771 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dclng\" (UniqueName: \"kubernetes.io/projected/02a0ac65-f45f-40dd-a592-64a26253c821-kube-api-access-dclng\") pod \"redhat-marketplace-tbpjm\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.628265 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-utilities\") pod \"redhat-marketplace-tbpjm\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.628323 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-catalog-content\") pod \"redhat-marketplace-tbpjm\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.651490 4911 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dclng\" (UniqueName: \"kubernetes.io/projected/02a0ac65-f45f-40dd-a592-64a26253c821-kube-api-access-dclng\") pod \"redhat-marketplace-tbpjm\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:00 crc kubenswrapper[4911]: I0606 09:46:00.719195 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:01 crc kubenswrapper[4911]: I0606 09:46:01.201962 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tbpjm"] Jun 06 09:46:01 crc kubenswrapper[4911]: W0606 09:46:01.206718 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02a0ac65_f45f_40dd_a592_64a26253c821.slice/crio-392a2d6fbebe8e7f3f006cc30078e8b261f567877c07c9e9bcf488bef4fb19cb WatchSource:0}: Error finding container 392a2d6fbebe8e7f3f006cc30078e8b261f567877c07c9e9bcf488bef4fb19cb: Status 404 returned error can't find the container with id 392a2d6fbebe8e7f3f006cc30078e8b261f567877c07c9e9bcf488bef4fb19cb Jun 06 09:46:01 crc kubenswrapper[4911]: I0606 09:46:01.600907 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-4xcpb"] Jun 06 09:46:01 crc kubenswrapper[4911]: I0606 09:46:01.602601 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-4xcpb" Jun 06 09:46:01 crc kubenswrapper[4911]: I0606 09:46:01.753882 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-host\") pod \"crc-debug-4xcpb\" (UID: \"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49\") " pod="openstack/crc-debug-4xcpb" Jun 06 09:46:01 crc kubenswrapper[4911]: I0606 09:46:01.754348 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzqk2\" (UniqueName: \"kubernetes.io/projected/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-kube-api-access-hzqk2\") pod \"crc-debug-4xcpb\" (UID: \"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49\") " pod="openstack/crc-debug-4xcpb" Jun 06 09:46:01 crc kubenswrapper[4911]: I0606 09:46:01.857710 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzqk2\" (UniqueName: \"kubernetes.io/projected/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-kube-api-access-hzqk2\") pod \"crc-debug-4xcpb\" (UID: \"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49\") " pod="openstack/crc-debug-4xcpb" Jun 06 09:46:01 crc kubenswrapper[4911]: I0606 09:46:01.857826 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-host\") pod \"crc-debug-4xcpb\" (UID: \"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49\") " pod="openstack/crc-debug-4xcpb" Jun 06 09:46:01 crc kubenswrapper[4911]: I0606 09:46:01.858064 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-host\") pod \"crc-debug-4xcpb\" (UID: \"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49\") " pod="openstack/crc-debug-4xcpb" Jun 06 09:46:01 crc kubenswrapper[4911]: I0606 09:46:01.879724 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzqk2\" (UniqueName: 
\"kubernetes.io/projected/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-kube-api-access-hzqk2\") pod \"crc-debug-4xcpb\" (UID: \"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49\") " pod="openstack/crc-debug-4xcpb" Jun 06 09:46:01 crc kubenswrapper[4911]: I0606 09:46:01.927344 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-4xcpb" Jun 06 09:46:01 crc kubenswrapper[4911]: W0606 09:46:01.982506 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4dc3c1f8_c885_442d_8773_ebd6dc7b6a49.slice/crio-a37513ccf0558b1b24b8e81716c1ec33bddbe6f8b2b6dcb49d069667b74cc7c1 WatchSource:0}: Error finding container a37513ccf0558b1b24b8e81716c1ec33bddbe6f8b2b6dcb49d069667b74cc7c1: Status 404 returned error can't find the container with id a37513ccf0558b1b24b8e81716c1ec33bddbe6f8b2b6dcb49d069667b74cc7c1 Jun 06 09:46:02 crc kubenswrapper[4911]: I0606 09:46:02.079610 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-4xcpb" event={"ID":"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49","Type":"ContainerStarted","Data":"a37513ccf0558b1b24b8e81716c1ec33bddbe6f8b2b6dcb49d069667b74cc7c1"} Jun 06 09:46:02 crc kubenswrapper[4911]: I0606 09:46:02.081938 4911 generic.go:334] "Generic (PLEG): container finished" podID="02a0ac65-f45f-40dd-a592-64a26253c821" containerID="24875246bcefef1254b019ef979cf47b009257f5b7310506bc26189c047b67b5" exitCode=0 Jun 06 09:46:02 crc kubenswrapper[4911]: I0606 09:46:02.081994 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tbpjm" event={"ID":"02a0ac65-f45f-40dd-a592-64a26253c821","Type":"ContainerDied","Data":"24875246bcefef1254b019ef979cf47b009257f5b7310506bc26189c047b67b5"} Jun 06 09:46:02 crc kubenswrapper[4911]: I0606 09:46:02.082050 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tbpjm" event={"ID":"02a0ac65-f45f-40dd-a592-64a26253c821","Type":"ContainerStarted","Data":"392a2d6fbebe8e7f3f006cc30078e8b261f567877c07c9e9bcf488bef4fb19cb"} Jun 06 09:46:03 crc kubenswrapper[4911]: I0606 09:46:03.103599 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-4xcpb" event={"ID":"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49","Type":"ContainerStarted","Data":"36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250"} Jun 06 09:46:03 crc kubenswrapper[4911]: I0606 09:46:03.106258 4911 generic.go:334] "Generic (PLEG): container finished" podID="02a0ac65-f45f-40dd-a592-64a26253c821" containerID="8f042e3447d8f679f8ffdbdbfb1b4de227cce308cec74bf6929e4acc319683b7" exitCode=0 Jun 06 09:46:03 crc kubenswrapper[4911]: I0606 09:46:03.106308 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tbpjm" event={"ID":"02a0ac65-f45f-40dd-a592-64a26253c821","Type":"ContainerDied","Data":"8f042e3447d8f679f8ffdbdbfb1b4de227cce308cec74bf6929e4acc319683b7"} Jun 06 09:46:03 crc kubenswrapper[4911]: I0606 09:46:03.124751 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-4xcpb" podStartSLOduration=2.124730442 podStartE2EDuration="2.124730442s" podCreationTimestamp="2025-06-06 09:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:46:03.11801101 +0000 UTC m=+1974.393436573" watchObservedRunningTime="2025-06-06 09:46:03.124730442 +0000 UTC m=+1974.400155995" Jun 06 
09:46:04 crc kubenswrapper[4911]: I0606 09:46:04.118108 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tbpjm" event={"ID":"02a0ac65-f45f-40dd-a592-64a26253c821","Type":"ContainerStarted","Data":"72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d"} Jun 06 09:46:04 crc kubenswrapper[4911]: I0606 09:46:04.140136 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tbpjm" podStartSLOduration=2.698534243 podStartE2EDuration="4.140088788s" podCreationTimestamp="2025-06-06 09:46:00 +0000 UTC" firstStartedPulling="2025-06-06 09:46:02.085574402 +0000 UTC m=+1973.360999955" lastFinishedPulling="2025-06-06 09:46:03.527128947 +0000 UTC m=+1974.802554500" observedRunningTime="2025-06-06 09:46:04.136428929 +0000 UTC m=+1975.411854492" watchObservedRunningTime="2025-06-06 09:46:04.140088788 +0000 UTC m=+1975.415514331" Jun 06 09:46:10 crc kubenswrapper[4911]: I0606 09:46:10.719553 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:10 crc kubenswrapper[4911]: I0606 09:46:10.720147 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:10 crc kubenswrapper[4911]: I0606 09:46:10.773568 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:11 crc kubenswrapper[4911]: I0606 09:46:11.241460 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:11 crc kubenswrapper[4911]: I0606 09:46:11.340772 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tbpjm"] Jun 06 09:46:12 crc kubenswrapper[4911]: I0606 09:46:12.606484 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-4xcpb"] Jun 06 09:46:12 crc kubenswrapper[4911]: I0606 09:46:12.607220 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-4xcpb" podUID="4dc3c1f8-c885-442d-8773-ebd6dc7b6a49" containerName="container-00" containerID="cri-o://36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250" gracePeriod=2 Jun 06 09:46:12 crc kubenswrapper[4911]: I0606 09:46:12.619541 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-4xcpb"] Jun 06 09:46:12 crc kubenswrapper[4911]: I0606 09:46:12.742867 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-4xcpb" Jun 06 09:46:12 crc kubenswrapper[4911]: I0606 09:46:12.906280 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzqk2\" (UniqueName: \"kubernetes.io/projected/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-kube-api-access-hzqk2\") pod \"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49\" (UID: \"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49\") " Jun 06 09:46:12 crc kubenswrapper[4911]: I0606 09:46:12.907344 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-host\") pod \"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49\" (UID: \"4dc3c1f8-c885-442d-8773-ebd6dc7b6a49\") " Jun 06 09:46:12 crc kubenswrapper[4911]: I0606 09:46:12.907515 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-host" (OuterVolumeSpecName: "host") pod "4dc3c1f8-c885-442d-8773-ebd6dc7b6a49" (UID: "4dc3c1f8-c885-442d-8773-ebd6dc7b6a49"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:46:12 crc kubenswrapper[4911]: I0606 09:46:12.908074 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:12 crc kubenswrapper[4911]: I0606 09:46:12.911914 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-kube-api-access-hzqk2" (OuterVolumeSpecName: "kube-api-access-hzqk2") pod "4dc3c1f8-c885-442d-8773-ebd6dc7b6a49" (UID: "4dc3c1f8-c885-442d-8773-ebd6dc7b6a49"). InnerVolumeSpecName "kube-api-access-hzqk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.010505 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzqk2\" (UniqueName: \"kubernetes.io/projected/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49-kube-api-access-hzqk2\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.206010 4911 generic.go:334] "Generic (PLEG): container finished" podID="4dc3c1f8-c885-442d-8773-ebd6dc7b6a49" containerID="36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250" exitCode=0 Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.206076 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-4xcpb" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.206174 4911 scope.go:117] "RemoveContainer" containerID="36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.206893 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tbpjm" podUID="02a0ac65-f45f-40dd-a592-64a26253c821" containerName="registry-server" containerID="cri-o://72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d" gracePeriod=2 Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.238143 4911 scope.go:117] "RemoveContainer" containerID="36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250" Jun 06 09:46:13 crc kubenswrapper[4911]: E0606 09:46:13.238782 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250\": container with ID starting with 36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250 not found: ID does not exist" containerID="36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.238822 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250"} err="failed to get container status \"36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250\": rpc error: code = NotFound desc = could not find container \"36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250\": container with ID starting with 36454b81930d63684291412cf0f47c7e6846d44a8a51273bc58f010f3a87d250 not found: ID does not exist" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.717674 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.827565 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-utilities\") pod \"02a0ac65-f45f-40dd-a592-64a26253c821\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.827765 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-catalog-content\") pod \"02a0ac65-f45f-40dd-a592-64a26253c821\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.827808 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dclng\" (UniqueName: \"kubernetes.io/projected/02a0ac65-f45f-40dd-a592-64a26253c821-kube-api-access-dclng\") pod \"02a0ac65-f45f-40dd-a592-64a26253c821\" (UID: \"02a0ac65-f45f-40dd-a592-64a26253c821\") " Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.828848 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-utilities" (OuterVolumeSpecName: "utilities") pod "02a0ac65-f45f-40dd-a592-64a26253c821" (UID: "02a0ac65-f45f-40dd-a592-64a26253c821"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.833959 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02a0ac65-f45f-40dd-a592-64a26253c821-kube-api-access-dclng" (OuterVolumeSpecName: "kube-api-access-dclng") pod "02a0ac65-f45f-40dd-a592-64a26253c821" (UID: "02a0ac65-f45f-40dd-a592-64a26253c821"). InnerVolumeSpecName "kube-api-access-dclng". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.840740 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "02a0ac65-f45f-40dd-a592-64a26253c821" (UID: "02a0ac65-f45f-40dd-a592-64a26253c821"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.930733 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.930779 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02a0ac65-f45f-40dd-a592-64a26253c821-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.930791 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dclng\" (UniqueName: \"kubernetes.io/projected/02a0ac65-f45f-40dd-a592-64a26253c821-kube-api-access-dclng\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:13 crc kubenswrapper[4911]: I0606 09:46:13.960319 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dc3c1f8-c885-442d-8773-ebd6dc7b6a49" path="/var/lib/kubelet/pods/4dc3c1f8-c885-442d-8773-ebd6dc7b6a49/volumes" Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.222120 4911 generic.go:334] "Generic (PLEG): container finished" podID="02a0ac65-f45f-40dd-a592-64a26253c821" containerID="72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d" exitCode=0 Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.222212 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tbpjm" Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.222204 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tbpjm" event={"ID":"02a0ac65-f45f-40dd-a592-64a26253c821","Type":"ContainerDied","Data":"72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d"} Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.222676 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tbpjm" event={"ID":"02a0ac65-f45f-40dd-a592-64a26253c821","Type":"ContainerDied","Data":"392a2d6fbebe8e7f3f006cc30078e8b261f567877c07c9e9bcf488bef4fb19cb"} Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.222746 4911 scope.go:117] "RemoveContainer" containerID="72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d" Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.246880 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tbpjm"] Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.247685 4911 scope.go:117] "RemoveContainer" containerID="8f042e3447d8f679f8ffdbdbfb1b4de227cce308cec74bf6929e4acc319683b7" Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.258222 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tbpjm"] Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.271795 4911 scope.go:117] "RemoveContainer" containerID="24875246bcefef1254b019ef979cf47b009257f5b7310506bc26189c047b67b5" Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.320146 4911 scope.go:117] "RemoveContainer" containerID="72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d" Jun 06 09:46:14 crc kubenswrapper[4911]: E0606 09:46:14.320719 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d\": container with ID starting with 72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d not found: ID does not exist" containerID="72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d" Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.320804 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d"} err="failed to get container status \"72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d\": rpc error: code = NotFound desc = could not find container \"72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d\": container with ID starting with 72b5aba341bad0c4da4071aafc82cf03a2436a767c65bed49c1ddcdab726834d not found: ID does not exist" Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.320860 4911 scope.go:117] "RemoveContainer" containerID="8f042e3447d8f679f8ffdbdbfb1b4de227cce308cec74bf6929e4acc319683b7" Jun 06 09:46:14 crc kubenswrapper[4911]: E0606 09:46:14.321900 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f042e3447d8f679f8ffdbdbfb1b4de227cce308cec74bf6929e4acc319683b7\": container with ID starting with 8f042e3447d8f679f8ffdbdbfb1b4de227cce308cec74bf6929e4acc319683b7 not found: ID does not exist" containerID="8f042e3447d8f679f8ffdbdbfb1b4de227cce308cec74bf6929e4acc319683b7" Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.321952 4911 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f042e3447d8f679f8ffdbdbfb1b4de227cce308cec74bf6929e4acc319683b7"} err="failed to get container status \"8f042e3447d8f679f8ffdbdbfb1b4de227cce308cec74bf6929e4acc319683b7\": rpc error: code = NotFound desc = could not find container \"8f042e3447d8f679f8ffdbdbfb1b4de227cce308cec74bf6929e4acc319683b7\": container with ID starting with 8f042e3447d8f679f8ffdbdbfb1b4de227cce308cec74bf6929e4acc319683b7 not found: ID does not exist" Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.322005 4911 scope.go:117] "RemoveContainer" containerID="24875246bcefef1254b019ef979cf47b009257f5b7310506bc26189c047b67b5" Jun 06 09:46:14 crc kubenswrapper[4911]: E0606 09:46:14.323069 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24875246bcefef1254b019ef979cf47b009257f5b7310506bc26189c047b67b5\": container with ID starting with 24875246bcefef1254b019ef979cf47b009257f5b7310506bc26189c047b67b5 not found: ID does not exist" containerID="24875246bcefef1254b019ef979cf47b009257f5b7310506bc26189c047b67b5" Jun 06 09:46:14 crc kubenswrapper[4911]: I0606 09:46:14.323182 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24875246bcefef1254b019ef979cf47b009257f5b7310506bc26189c047b67b5"} err="failed to get container status \"24875246bcefef1254b019ef979cf47b009257f5b7310506bc26189c047b67b5\": rpc error: code = NotFound desc = could not find container \"24875246bcefef1254b019ef979cf47b009257f5b7310506bc26189c047b67b5\": container with ID starting with 24875246bcefef1254b019ef979cf47b009257f5b7310506bc26189c047b67b5 not found: ID does not exist" Jun 06 09:46:15 crc kubenswrapper[4911]: I0606 09:46:15.961039 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02a0ac65-f45f-40dd-a592-64a26253c821" path="/var/lib/kubelet/pods/02a0ac65-f45f-40dd-a592-64a26253c821/volumes" Jun 06 09:46:24 crc kubenswrapper[4911]: I0606 09:46:24.300871 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:46:24 crc kubenswrapper[4911]: I0606 09:46:24.301433 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.390173 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kp9r9"] Jun 06 09:46:32 crc kubenswrapper[4911]: E0606 09:46:32.391266 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02a0ac65-f45f-40dd-a592-64a26253c821" containerName="extract-content" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.391281 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="02a0ac65-f45f-40dd-a592-64a26253c821" containerName="extract-content" Jun 06 09:46:32 crc kubenswrapper[4911]: E0606 09:46:32.391298 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc3c1f8-c885-442d-8773-ebd6dc7b6a49" containerName="container-00" Jun 06 09:46:32 crc 
kubenswrapper[4911]: I0606 09:46:32.391304 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc3c1f8-c885-442d-8773-ebd6dc7b6a49" containerName="container-00" Jun 06 09:46:32 crc kubenswrapper[4911]: E0606 09:46:32.391325 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02a0ac65-f45f-40dd-a592-64a26253c821" containerName="extract-utilities" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.391331 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="02a0ac65-f45f-40dd-a592-64a26253c821" containerName="extract-utilities" Jun 06 09:46:32 crc kubenswrapper[4911]: E0606 09:46:32.391341 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02a0ac65-f45f-40dd-a592-64a26253c821" containerName="registry-server" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.391347 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="02a0ac65-f45f-40dd-a592-64a26253c821" containerName="registry-server" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.391535 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="02a0ac65-f45f-40dd-a592-64a26253c821" containerName="registry-server" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.391552 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dc3c1f8-c885-442d-8773-ebd6dc7b6a49" containerName="container-00" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.392985 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.420832 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kp9r9"] Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.420952 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-catalog-content\") pod \"certified-operators-kp9r9\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.421532 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf9gm\" (UniqueName: \"kubernetes.io/projected/2dd16970-3b52-4e5b-9798-6c6814dcb663-kube-api-access-wf9gm\") pod \"certified-operators-kp9r9\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.421637 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-utilities\") pod \"certified-operators-kp9r9\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.523928 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-catalog-content\") pod \"certified-operators-kp9r9\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.524055 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf9gm\" 
(UniqueName: \"kubernetes.io/projected/2dd16970-3b52-4e5b-9798-6c6814dcb663-kube-api-access-wf9gm\") pod \"certified-operators-kp9r9\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.524166 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-utilities\") pod \"certified-operators-kp9r9\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.524711 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-utilities\") pod \"certified-operators-kp9r9\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.524735 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-catalog-content\") pod \"certified-operators-kp9r9\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.546968 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf9gm\" (UniqueName: \"kubernetes.io/projected/2dd16970-3b52-4e5b-9798-6c6814dcb663-kube-api-access-wf9gm\") pod \"certified-operators-kp9r9\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:32 crc kubenswrapper[4911]: I0606 09:46:32.728910 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:33 crc kubenswrapper[4911]: I0606 09:46:33.214495 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kp9r9"] Jun 06 09:46:33 crc kubenswrapper[4911]: I0606 09:46:33.429314 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kp9r9" event={"ID":"2dd16970-3b52-4e5b-9798-6c6814dcb663","Type":"ContainerStarted","Data":"0278c17c442c4e68d5de6d5c0443de5a3e0b28a69194a546e32c2e1fc992c5bb"} Jun 06 09:46:34 crc kubenswrapper[4911]: I0606 09:46:34.439472 4911 generic.go:334] "Generic (PLEG): container finished" podID="2dd16970-3b52-4e5b-9798-6c6814dcb663" containerID="711db486df4a5cadcc81e5f3b82511f78341533581102d02b93fe9c22216a477" exitCode=0 Jun 06 09:46:34 crc kubenswrapper[4911]: I0606 09:46:34.439517 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kp9r9" event={"ID":"2dd16970-3b52-4e5b-9798-6c6814dcb663","Type":"ContainerDied","Data":"711db486df4a5cadcc81e5f3b82511f78341533581102d02b93fe9c22216a477"} Jun 06 09:46:35 crc kubenswrapper[4911]: I0606 09:46:35.453522 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kp9r9" event={"ID":"2dd16970-3b52-4e5b-9798-6c6814dcb663","Type":"ContainerStarted","Data":"ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268"} Jun 06 09:46:36 crc kubenswrapper[4911]: I0606 09:46:36.468197 4911 generic.go:334] "Generic (PLEG): container finished" podID="2dd16970-3b52-4e5b-9798-6c6814dcb663" containerID="ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268" exitCode=0 Jun 06 09:46:36 crc kubenswrapper[4911]: I0606 09:46:36.468255 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kp9r9" event={"ID":"2dd16970-3b52-4e5b-9798-6c6814dcb663","Type":"ContainerDied","Data":"ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268"} Jun 06 09:46:37 crc kubenswrapper[4911]: I0606 09:46:37.480898 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kp9r9" event={"ID":"2dd16970-3b52-4e5b-9798-6c6814dcb663","Type":"ContainerStarted","Data":"c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c"} Jun 06 09:46:37 crc kubenswrapper[4911]: I0606 09:46:37.504836 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kp9r9" podStartSLOduration=2.978129255 podStartE2EDuration="5.504811899s" podCreationTimestamp="2025-06-06 09:46:32 +0000 UTC" firstStartedPulling="2025-06-06 09:46:34.441645197 +0000 UTC m=+2005.717070730" lastFinishedPulling="2025-06-06 09:46:36.968327831 +0000 UTC m=+2008.243753374" observedRunningTime="2025-06-06 09:46:37.499598464 +0000 UTC m=+2008.775024017" watchObservedRunningTime="2025-06-06 09:46:37.504811899 +0000 UTC m=+2008.780237472" Jun 06 09:46:42 crc kubenswrapper[4911]: I0606 09:46:42.729661 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:42 crc kubenswrapper[4911]: I0606 09:46:42.730329 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:42 crc kubenswrapper[4911]: I0606 09:46:42.777300 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:43 crc kubenswrapper[4911]: I0606 09:46:43.535487 4911 generic.go:334] "Generic (PLEG): container finished" podID="2947ca03-3408-495a-961c-9d548088ebe4" containerID="314f288b680127b9f08d4b2dc955be7ed8f93e55a3c44714457bc820af706eda" exitCode=0 Jun 06 09:46:43 crc kubenswrapper[4911]: I0606 09:46:43.535558 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" event={"ID":"2947ca03-3408-495a-961c-9d548088ebe4","Type":"ContainerDied","Data":"314f288b680127b9f08d4b2dc955be7ed8f93e55a3c44714457bc820af706eda"} Jun 06 09:46:43 crc kubenswrapper[4911]: I0606 09:46:43.587847 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:43 crc kubenswrapper[4911]: I0606 09:46:43.645664 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kp9r9"] Jun 06 09:46:44 crc kubenswrapper[4911]: I0606 09:46:44.993721 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.115333 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crch2\" (UniqueName: \"kubernetes.io/projected/2947ca03-3408-495a-961c-9d548088ebe4-kube-api-access-crch2\") pod \"2947ca03-3408-495a-961c-9d548088ebe4\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.115506 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ssh-key\") pod \"2947ca03-3408-495a-961c-9d548088ebe4\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.115697 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ovn-combined-ca-bundle\") pod \"2947ca03-3408-495a-961c-9d548088ebe4\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.115811 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-inventory\") pod \"2947ca03-3408-495a-961c-9d548088ebe4\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.115980 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2947ca03-3408-495a-961c-9d548088ebe4-ovncontroller-config-0\") pod \"2947ca03-3408-495a-961c-9d548088ebe4\" (UID: \"2947ca03-3408-495a-961c-9d548088ebe4\") " Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.123228 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "2947ca03-3408-495a-961c-9d548088ebe4" (UID: "2947ca03-3408-495a-961c-9d548088ebe4"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.123411 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2947ca03-3408-495a-961c-9d548088ebe4-kube-api-access-crch2" (OuterVolumeSpecName: "kube-api-access-crch2") pod "2947ca03-3408-495a-961c-9d548088ebe4" (UID: "2947ca03-3408-495a-961c-9d548088ebe4"). InnerVolumeSpecName "kube-api-access-crch2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.146239 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-inventory" (OuterVolumeSpecName: "inventory") pod "2947ca03-3408-495a-961c-9d548088ebe4" (UID: "2947ca03-3408-495a-961c-9d548088ebe4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.148993 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2947ca03-3408-495a-961c-9d548088ebe4" (UID: "2947ca03-3408-495a-961c-9d548088ebe4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.156357 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2947ca03-3408-495a-961c-9d548088ebe4-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "2947ca03-3408-495a-961c-9d548088ebe4" (UID: "2947ca03-3408-495a-961c-9d548088ebe4"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.218997 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crch2\" (UniqueName: \"kubernetes.io/projected/2947ca03-3408-495a-961c-9d548088ebe4-kube-api-access-crch2\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.219043 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.219058 4911 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.219073 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2947ca03-3408-495a-961c-9d548088ebe4-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.219107 4911 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2947ca03-3408-495a-961c-9d548088ebe4-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.557223 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.557267 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qhv7q" event={"ID":"2947ca03-3408-495a-961c-9d548088ebe4","Type":"ContainerDied","Data":"76d30e03e7cecaea2f82a2725351fbf1ac6f231b43b867b2e997bbbb1cfa8791"} Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.557603 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76d30e03e7cecaea2f82a2725351fbf1ac6f231b43b867b2e997bbbb1cfa8791" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.557407 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kp9r9" podUID="2dd16970-3b52-4e5b-9798-6c6814dcb663" containerName="registry-server" containerID="cri-o://c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c" gracePeriod=2 Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.712766 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9"] Jun 06 09:46:45 crc kubenswrapper[4911]: E0606 09:46:45.713266 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2947ca03-3408-495a-961c-9d548088ebe4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.713287 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2947ca03-3408-495a-961c-9d548088ebe4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.713561 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="2947ca03-3408-495a-961c-9d548088ebe4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.714415 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.717639 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.717862 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.718041 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.727947 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.727947 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.728176 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.735192 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9"] Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.830982 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.831336 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.831447 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.831641 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.831711 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.831769 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57wh6\" (UniqueName: \"kubernetes.io/projected/eea86939-055a-4c55-a850-7cdd3c82e998-kube-api-access-57wh6\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.934963 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.935038 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.935191 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.935249 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.935299 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57wh6\" (UniqueName: \"kubernetes.io/projected/eea86939-055a-4c55-a850-7cdd3c82e998-kube-api-access-57wh6\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.935391 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 
09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.944316 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.947126 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.947560 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.953388 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.963366 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57wh6\" (UniqueName: \"kubernetes.io/projected/eea86939-055a-4c55-a850-7cdd3c82e998-kube-api-access-57wh6\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:45 crc kubenswrapper[4911]: I0606 09:46:45.973717 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.055473 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.207544 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.244675 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-utilities\") pod \"2dd16970-3b52-4e5b-9798-6c6814dcb663\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.245983 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wf9gm\" (UniqueName: \"kubernetes.io/projected/2dd16970-3b52-4e5b-9798-6c6814dcb663-kube-api-access-wf9gm\") pod \"2dd16970-3b52-4e5b-9798-6c6814dcb663\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.246160 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-catalog-content\") pod \"2dd16970-3b52-4e5b-9798-6c6814dcb663\" (UID: \"2dd16970-3b52-4e5b-9798-6c6814dcb663\") " Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.246242 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-utilities" (OuterVolumeSpecName: "utilities") pod "2dd16970-3b52-4e5b-9798-6c6814dcb663" (UID: "2dd16970-3b52-4e5b-9798-6c6814dcb663"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.249050 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.257391 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2dd16970-3b52-4e5b-9798-6c6814dcb663-kube-api-access-wf9gm" (OuterVolumeSpecName: "kube-api-access-wf9gm") pod "2dd16970-3b52-4e5b-9798-6c6814dcb663" (UID: "2dd16970-3b52-4e5b-9798-6c6814dcb663"). InnerVolumeSpecName "kube-api-access-wf9gm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.286709 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2dd16970-3b52-4e5b-9798-6c6814dcb663" (UID: "2dd16970-3b52-4e5b-9798-6c6814dcb663"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.352053 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dd16970-3b52-4e5b-9798-6c6814dcb663-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.352492 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wf9gm\" (UniqueName: \"kubernetes.io/projected/2dd16970-3b52-4e5b-9798-6c6814dcb663-kube-api-access-wf9gm\") on node \"crc\" DevicePath \"\"" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.571754 4911 generic.go:334] "Generic (PLEG): container finished" podID="2dd16970-3b52-4e5b-9798-6c6814dcb663" containerID="c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c" exitCode=0 Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.571806 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kp9r9" event={"ID":"2dd16970-3b52-4e5b-9798-6c6814dcb663","Type":"ContainerDied","Data":"c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c"} Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.571844 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kp9r9" event={"ID":"2dd16970-3b52-4e5b-9798-6c6814dcb663","Type":"ContainerDied","Data":"0278c17c442c4e68d5de6d5c0443de5a3e0b28a69194a546e32c2e1fc992c5bb"} Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.571865 4911 scope.go:117] "RemoveContainer" containerID="c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.572232 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kp9r9" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.596812 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9"] Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.615463 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kp9r9"] Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.617287 4911 scope.go:117] "RemoveContainer" containerID="ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.623286 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kp9r9"] Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.643856 4911 scope.go:117] "RemoveContainer" containerID="711db486df4a5cadcc81e5f3b82511f78341533581102d02b93fe9c22216a477" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.678591 4911 scope.go:117] "RemoveContainer" containerID="c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c" Jun 06 09:46:46 crc kubenswrapper[4911]: E0606 09:46:46.679218 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c\": container with ID starting with c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c not found: ID does not exist" containerID="c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.679260 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c"} err="failed to get container status \"c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c\": rpc error: code = NotFound desc = could not find container \"c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c\": container with ID starting with c3fe48e6ac873f5a8a7163f0a8581c90cf1a3510ef8d17faf4c92fd362276d9c not found: ID does not exist" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.679286 4911 scope.go:117] "RemoveContainer" containerID="ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268" Jun 06 09:46:46 crc kubenswrapper[4911]: E0606 09:46:46.679550 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268\": container with ID starting with ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268 not found: ID does not exist" containerID="ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.679589 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268"} err="failed to get container status \"ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268\": rpc error: code = NotFound desc = could not find container \"ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268\": container with ID starting with ac8ee896ed0a35718ebd98ad86305aad91d3b8f8b06af5b9486e401e5b2d0268 not found: ID does not exist" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.679609 4911 scope.go:117] "RemoveContainer" 
containerID="711db486df4a5cadcc81e5f3b82511f78341533581102d02b93fe9c22216a477" Jun 06 09:46:46 crc kubenswrapper[4911]: E0606 09:46:46.679898 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"711db486df4a5cadcc81e5f3b82511f78341533581102d02b93fe9c22216a477\": container with ID starting with 711db486df4a5cadcc81e5f3b82511f78341533581102d02b93fe9c22216a477 not found: ID does not exist" containerID="711db486df4a5cadcc81e5f3b82511f78341533581102d02b93fe9c22216a477" Jun 06 09:46:46 crc kubenswrapper[4911]: I0606 09:46:46.679931 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"711db486df4a5cadcc81e5f3b82511f78341533581102d02b93fe9c22216a477"} err="failed to get container status \"711db486df4a5cadcc81e5f3b82511f78341533581102d02b93fe9c22216a477\": rpc error: code = NotFound desc = could not find container \"711db486df4a5cadcc81e5f3b82511f78341533581102d02b93fe9c22216a477\": container with ID starting with 711db486df4a5cadcc81e5f3b82511f78341533581102d02b93fe9c22216a477 not found: ID does not exist" Jun 06 09:46:47 crc kubenswrapper[4911]: I0606 09:46:47.587749 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" event={"ID":"eea86939-055a-4c55-a850-7cdd3c82e998","Type":"ContainerStarted","Data":"0a9fcca55d45d7da66f3e8d4c671dd702c994b32620e416b39fed0e6f399fb14"} Jun 06 09:46:47 crc kubenswrapper[4911]: I0606 09:46:47.588351 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" event={"ID":"eea86939-055a-4c55-a850-7cdd3c82e998","Type":"ContainerStarted","Data":"c8d237de0e973b2c6b9168b3494d921c99ac14189ebee70df1c5c28df11eedb9"} Jun 06 09:46:47 crc kubenswrapper[4911]: I0606 09:46:47.608600 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" podStartSLOduration=2.20580823 podStartE2EDuration="2.608579867s" podCreationTimestamp="2025-06-06 09:46:45 +0000 UTC" firstStartedPulling="2025-06-06 09:46:46.61748551 +0000 UTC m=+2017.892911053" lastFinishedPulling="2025-06-06 09:46:47.020257147 +0000 UTC m=+2018.295682690" observedRunningTime="2025-06-06 09:46:47.606683568 +0000 UTC m=+2018.882109121" watchObservedRunningTime="2025-06-06 09:46:47.608579867 +0000 UTC m=+2018.884005410" Jun 06 09:46:47 crc kubenswrapper[4911]: I0606 09:46:47.968445 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2dd16970-3b52-4e5b-9798-6c6814dcb663" path="/var/lib/kubelet/pods/2dd16970-3b52-4e5b-9798-6c6814dcb663/volumes" Jun 06 09:46:54 crc kubenswrapper[4911]: I0606 09:46:54.299908 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:46:54 crc kubenswrapper[4911]: I0606 09:46:54.300824 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:46:54 crc kubenswrapper[4911]: I0606 09:46:54.300897 4911 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:46:54 crc kubenswrapper[4911]: I0606 09:46:54.302057 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"90c33cab05764d4260fff87e3c1193421c452a61479539125aaaae13a635ce01"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 09:46:54 crc kubenswrapper[4911]: I0606 09:46:54.302153 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://90c33cab05764d4260fff87e3c1193421c452a61479539125aaaae13a635ce01" gracePeriod=600 Jun 06 09:46:54 crc kubenswrapper[4911]: I0606 09:46:54.653442 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="90c33cab05764d4260fff87e3c1193421c452a61479539125aaaae13a635ce01" exitCode=0 Jun 06 09:46:54 crc kubenswrapper[4911]: I0606 09:46:54.653754 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"90c33cab05764d4260fff87e3c1193421c452a61479539125aaaae13a635ce01"} Jun 06 09:46:54 crc kubenswrapper[4911]: I0606 09:46:54.653794 4911 scope.go:117] "RemoveContainer" containerID="ff6c3528941b2edd96dcfea37a333476114b34c7fb0460572a659408ac37f2c1" Jun 06 09:46:55 crc kubenswrapper[4911]: I0606 09:46:55.668918 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81"} Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.045382 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-h2rnn"] Jun 06 09:47:02 crc kubenswrapper[4911]: E0606 09:47:02.046374 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dd16970-3b52-4e5b-9798-6c6814dcb663" containerName="registry-server" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.046394 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dd16970-3b52-4e5b-9798-6c6814dcb663" containerName="registry-server" Jun 06 09:47:02 crc kubenswrapper[4911]: E0606 09:47:02.046411 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dd16970-3b52-4e5b-9798-6c6814dcb663" containerName="extract-utilities" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.046419 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dd16970-3b52-4e5b-9798-6c6814dcb663" containerName="extract-utilities" Jun 06 09:47:02 crc kubenswrapper[4911]: E0606 09:47:02.046450 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dd16970-3b52-4e5b-9798-6c6814dcb663" containerName="extract-content" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.046459 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dd16970-3b52-4e5b-9798-6c6814dcb663" containerName="extract-content" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.046724 4911 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="2dd16970-3b52-4e5b-9798-6c6814dcb663" containerName="registry-server" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.047532 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-h2rnn" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.191755 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tspn\" (UniqueName: \"kubernetes.io/projected/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-kube-api-access-4tspn\") pod \"crc-debug-h2rnn\" (UID: \"5be337a8-f6a1-4c49-92c0-3ce89bce03f2\") " pod="openstack/crc-debug-h2rnn" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.191856 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-host\") pod \"crc-debug-h2rnn\" (UID: \"5be337a8-f6a1-4c49-92c0-3ce89bce03f2\") " pod="openstack/crc-debug-h2rnn" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.293584 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tspn\" (UniqueName: \"kubernetes.io/projected/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-kube-api-access-4tspn\") pod \"crc-debug-h2rnn\" (UID: \"5be337a8-f6a1-4c49-92c0-3ce89bce03f2\") " pod="openstack/crc-debug-h2rnn" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.293651 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-host\") pod \"crc-debug-h2rnn\" (UID: \"5be337a8-f6a1-4c49-92c0-3ce89bce03f2\") " pod="openstack/crc-debug-h2rnn" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.293802 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-host\") pod \"crc-debug-h2rnn\" (UID: \"5be337a8-f6a1-4c49-92c0-3ce89bce03f2\") " pod="openstack/crc-debug-h2rnn" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.316152 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tspn\" (UniqueName: \"kubernetes.io/projected/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-kube-api-access-4tspn\") pod \"crc-debug-h2rnn\" (UID: \"5be337a8-f6a1-4c49-92c0-3ce89bce03f2\") " pod="openstack/crc-debug-h2rnn" Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.372117 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-h2rnn" Jun 06 09:47:02 crc kubenswrapper[4911]: W0606 09:47:02.408937 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5be337a8_f6a1_4c49_92c0_3ce89bce03f2.slice/crio-2ffc71a5a68b8d253b2761e337e5828384588c525741b4dadfea7f5a399a0602 WatchSource:0}: Error finding container 2ffc71a5a68b8d253b2761e337e5828384588c525741b4dadfea7f5a399a0602: Status 404 returned error can't find the container with id 2ffc71a5a68b8d253b2761e337e5828384588c525741b4dadfea7f5a399a0602 Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.733664 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-h2rnn" event={"ID":"5be337a8-f6a1-4c49-92c0-3ce89bce03f2","Type":"ContainerStarted","Data":"3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106"} Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.733971 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-h2rnn" event={"ID":"5be337a8-f6a1-4c49-92c0-3ce89bce03f2","Type":"ContainerStarted","Data":"2ffc71a5a68b8d253b2761e337e5828384588c525741b4dadfea7f5a399a0602"} Jun 06 09:47:02 crc kubenswrapper[4911]: I0606 09:47:02.748992 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-h2rnn" podStartSLOduration=0.748967333 podStartE2EDuration="748.967333ms" podCreationTimestamp="2025-06-06 09:47:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:47:02.74807231 +0000 UTC m=+2034.023497863" watchObservedRunningTime="2025-06-06 09:47:02.748967333 +0000 UTC m=+2034.024392886" Jun 06 09:47:12 crc kubenswrapper[4911]: I0606 09:47:12.988199 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-h2rnn"] Jun 06 09:47:12 crc kubenswrapper[4911]: I0606 09:47:12.989610 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-h2rnn" podUID="5be337a8-f6a1-4c49-92c0-3ce89bce03f2" containerName="container-00" containerID="cri-o://3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106" gracePeriod=2 Jun 06 09:47:12 crc kubenswrapper[4911]: I0606 09:47:12.996041 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-h2rnn"] Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.116013 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-h2rnn" Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.221518 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-host\") pod \"5be337a8-f6a1-4c49-92c0-3ce89bce03f2\" (UID: \"5be337a8-f6a1-4c49-92c0-3ce89bce03f2\") " Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.221652 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tspn\" (UniqueName: \"kubernetes.io/projected/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-kube-api-access-4tspn\") pod \"5be337a8-f6a1-4c49-92c0-3ce89bce03f2\" (UID: \"5be337a8-f6a1-4c49-92c0-3ce89bce03f2\") " Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.221685 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-host" (OuterVolumeSpecName: "host") pod "5be337a8-f6a1-4c49-92c0-3ce89bce03f2" (UID: "5be337a8-f6a1-4c49-92c0-3ce89bce03f2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.222855 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.227667 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-kube-api-access-4tspn" (OuterVolumeSpecName: "kube-api-access-4tspn") pod "5be337a8-f6a1-4c49-92c0-3ce89bce03f2" (UID: "5be337a8-f6a1-4c49-92c0-3ce89bce03f2"). InnerVolumeSpecName "kube-api-access-4tspn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.324576 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tspn\" (UniqueName: \"kubernetes.io/projected/5be337a8-f6a1-4c49-92c0-3ce89bce03f2-kube-api-access-4tspn\") on node \"crc\" DevicePath \"\"" Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.846236 4911 generic.go:334] "Generic (PLEG): container finished" podID="5be337a8-f6a1-4c49-92c0-3ce89bce03f2" containerID="3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106" exitCode=0 Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.846311 4911 scope.go:117] "RemoveContainer" containerID="3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106" Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.846336 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-h2rnn" Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.873433 4911 scope.go:117] "RemoveContainer" containerID="3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106" Jun 06 09:47:13 crc kubenswrapper[4911]: E0606 09:47:13.875639 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106\": container with ID starting with 3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106 not found: ID does not exist" containerID="3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106" Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.875720 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106"} err="failed to get container status \"3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106\": rpc error: code = NotFound desc = could not find container \"3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106\": container with ID starting with 3dcdd456238b2e1e1239581041c2e6187329baf3173ea9b12adb4c9832a62106 not found: ID does not exist" Jun 06 09:47:13 crc kubenswrapper[4911]: I0606 09:47:13.961724 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5be337a8-f6a1-4c49-92c0-3ce89bce03f2" path="/var/lib/kubelet/pods/5be337a8-f6a1-4c49-92c0-3ce89bce03f2/volumes" Jun 06 09:47:25 crc kubenswrapper[4911]: E0606 09:47:25.617758 4911 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeea86939_055a_4c55_a850_7cdd3c82e998.slice/crio-conmon-0a9fcca55d45d7da66f3e8d4c671dd702c994b32620e416b39fed0e6f399fb14.scope\": RecentStats: unable to find data in memory cache]" Jun 06 09:47:25 crc kubenswrapper[4911]: I0606 09:47:25.961744 4911 generic.go:334] "Generic (PLEG): container finished" podID="eea86939-055a-4c55-a850-7cdd3c82e998" containerID="0a9fcca55d45d7da66f3e8d4c671dd702c994b32620e416b39fed0e6f399fb14" exitCode=0 Jun 06 09:47:25 crc kubenswrapper[4911]: I0606 09:47:25.961826 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" event={"ID":"eea86939-055a-4c55-a850-7cdd3c82e998","Type":"ContainerDied","Data":"0a9fcca55d45d7da66f3e8d4c671dd702c994b32620e416b39fed0e6f399fb14"} Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.399764 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.468038 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-nova-metadata-neutron-config-0\") pod \"eea86939-055a-4c55-a850-7cdd3c82e998\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.468288 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-inventory\") pod \"eea86939-055a-4c55-a850-7cdd3c82e998\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.468328 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-ovn-metadata-agent-neutron-config-0\") pod \"eea86939-055a-4c55-a850-7cdd3c82e998\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.468374 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-ssh-key\") pod \"eea86939-055a-4c55-a850-7cdd3c82e998\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.468402 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-metadata-combined-ca-bundle\") pod \"eea86939-055a-4c55-a850-7cdd3c82e998\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.468447 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57wh6\" (UniqueName: \"kubernetes.io/projected/eea86939-055a-4c55-a850-7cdd3c82e998-kube-api-access-57wh6\") pod \"eea86939-055a-4c55-a850-7cdd3c82e998\" (UID: \"eea86939-055a-4c55-a850-7cdd3c82e998\") " Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.475275 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eea86939-055a-4c55-a850-7cdd3c82e998-kube-api-access-57wh6" (OuterVolumeSpecName: "kube-api-access-57wh6") pod "eea86939-055a-4c55-a850-7cdd3c82e998" (UID: "eea86939-055a-4c55-a850-7cdd3c82e998"). InnerVolumeSpecName "kube-api-access-57wh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.475747 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "eea86939-055a-4c55-a850-7cdd3c82e998" (UID: "eea86939-055a-4c55-a850-7cdd3c82e998"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.504059 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-inventory" (OuterVolumeSpecName: "inventory") pod "eea86939-055a-4c55-a850-7cdd3c82e998" (UID: "eea86939-055a-4c55-a850-7cdd3c82e998"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.506691 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "eea86939-055a-4c55-a850-7cdd3c82e998" (UID: "eea86939-055a-4c55-a850-7cdd3c82e998"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.506723 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "eea86939-055a-4c55-a850-7cdd3c82e998" (UID: "eea86939-055a-4c55-a850-7cdd3c82e998"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.507125 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "eea86939-055a-4c55-a850-7cdd3c82e998" (UID: "eea86939-055a-4c55-a850-7cdd3c82e998"). InnerVolumeSpecName "nova-metadata-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.571009 4911 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.571060 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.571074 4911 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.571103 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.571127 4911 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea86939-055a-4c55-a850-7cdd3c82e998-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.571143 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57wh6\" (UniqueName: \"kubernetes.io/projected/eea86939-055a-4c55-a850-7cdd3c82e998-kube-api-access-57wh6\") on node \"crc\" DevicePath \"\"" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.982702 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" event={"ID":"eea86939-055a-4c55-a850-7cdd3c82e998","Type":"ContainerDied","Data":"c8d237de0e973b2c6b9168b3494d921c99ac14189ebee70df1c5c28df11eedb9"} Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.982754 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9" Jun 06 09:47:27 crc kubenswrapper[4911]: I0606 09:47:27.982758 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8d237de0e973b2c6b9168b3494d921c99ac14189ebee70df1c5c28df11eedb9" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.085546 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw"] Jun 06 09:47:28 crc kubenswrapper[4911]: E0606 09:47:28.086157 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5be337a8-f6a1-4c49-92c0-3ce89bce03f2" containerName="container-00" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.086178 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5be337a8-f6a1-4c49-92c0-3ce89bce03f2" containerName="container-00" Jun 06 09:47:28 crc kubenswrapper[4911]: E0606 09:47:28.086203 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea86939-055a-4c55-a850-7cdd3c82e998" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.086214 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea86939-055a-4c55-a850-7cdd3c82e998" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.086389 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="eea86939-055a-4c55-a850-7cdd3c82e998" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.086418 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5be337a8-f6a1-4c49-92c0-3ce89bce03f2" containerName="container-00" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.087378 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.090067 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.095532 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw"] Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.127894 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.128439 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.129742 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.130059 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.182306 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.182800 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.182832 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.182852 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.182931 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8c7cb\" (UniqueName: \"kubernetes.io/projected/7bf0ed24-49d2-403e-afe0-25483f916433-kube-api-access-8c7cb\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.286197 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.286287 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.286331 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.286448 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8c7cb\" (UniqueName: \"kubernetes.io/projected/7bf0ed24-49d2-403e-afe0-25483f916433-kube-api-access-8c7cb\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.286555 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.292221 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.292268 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.292414 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.293846 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-ssh-key\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.307852 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8c7cb\" (UniqueName: \"kubernetes.io/projected/7bf0ed24-49d2-403e-afe0-25483f916433-kube-api-access-8c7cb\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-csqcw\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:28 crc kubenswrapper[4911]: I0606 09:47:28.444687 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:47:29 crc kubenswrapper[4911]: I0606 09:47:29.017510 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw"] Jun 06 09:47:30 crc kubenswrapper[4911]: I0606 09:47:30.003755 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" event={"ID":"7bf0ed24-49d2-403e-afe0-25483f916433","Type":"ContainerStarted","Data":"1973bce1e62909be9399c84245221a51848a89f4659894a1b0c290cde3d38f37"} Jun 06 09:47:30 crc kubenswrapper[4911]: I0606 09:47:30.004367 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" event={"ID":"7bf0ed24-49d2-403e-afe0-25483f916433","Type":"ContainerStarted","Data":"5e4ffb4fb5d76a6b53c3b43648593f4040c225c4ed4d3363b55b680e325d8ab0"} Jun 06 09:47:30 crc kubenswrapper[4911]: I0606 09:47:30.028568 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" podStartSLOduration=1.498310841 podStartE2EDuration="2.028544688s" podCreationTimestamp="2025-06-06 09:47:28 +0000 UTC" firstStartedPulling="2025-06-06 09:47:29.025276905 +0000 UTC m=+2060.300702448" lastFinishedPulling="2025-06-06 09:47:29.555510752 +0000 UTC m=+2060.830936295" observedRunningTime="2025-06-06 09:47:30.020529361 +0000 UTC m=+2061.295954924" watchObservedRunningTime="2025-06-06 09:47:30.028544688 +0000 UTC m=+2061.303970231" Jun 06 09:48:02 crc kubenswrapper[4911]: I0606 09:48:02.481713 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-c6d8s"] Jun 06 09:48:02 crc kubenswrapper[4911]: I0606 09:48:02.484911 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-c6d8s" Jun 06 09:48:02 crc kubenswrapper[4911]: I0606 09:48:02.635585 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3015c280-3c09-4b58-815b-30e61255a570-host\") pod \"crc-debug-c6d8s\" (UID: \"3015c280-3c09-4b58-815b-30e61255a570\") " pod="openstack/crc-debug-c6d8s" Jun 06 09:48:02 crc kubenswrapper[4911]: I0606 09:48:02.635695 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjhtg\" (UniqueName: \"kubernetes.io/projected/3015c280-3c09-4b58-815b-30e61255a570-kube-api-access-hjhtg\") pod \"crc-debug-c6d8s\" (UID: \"3015c280-3c09-4b58-815b-30e61255a570\") " pod="openstack/crc-debug-c6d8s" Jun 06 09:48:02 crc kubenswrapper[4911]: I0606 09:48:02.738111 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3015c280-3c09-4b58-815b-30e61255a570-host\") pod \"crc-debug-c6d8s\" (UID: \"3015c280-3c09-4b58-815b-30e61255a570\") " pod="openstack/crc-debug-c6d8s" Jun 06 09:48:02 crc kubenswrapper[4911]: I0606 09:48:02.738279 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjhtg\" (UniqueName: \"kubernetes.io/projected/3015c280-3c09-4b58-815b-30e61255a570-kube-api-access-hjhtg\") pod \"crc-debug-c6d8s\" (UID: \"3015c280-3c09-4b58-815b-30e61255a570\") " pod="openstack/crc-debug-c6d8s" Jun 06 09:48:02 crc kubenswrapper[4911]: I0606 09:48:02.738393 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3015c280-3c09-4b58-815b-30e61255a570-host\") pod \"crc-debug-c6d8s\" (UID: \"3015c280-3c09-4b58-815b-30e61255a570\") " pod="openstack/crc-debug-c6d8s" Jun 06 09:48:02 crc kubenswrapper[4911]: I0606 09:48:02.761060 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjhtg\" (UniqueName: \"kubernetes.io/projected/3015c280-3c09-4b58-815b-30e61255a570-kube-api-access-hjhtg\") pod \"crc-debug-c6d8s\" (UID: \"3015c280-3c09-4b58-815b-30e61255a570\") " pod="openstack/crc-debug-c6d8s" Jun 06 09:48:02 crc kubenswrapper[4911]: I0606 09:48:02.809509 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-c6d8s" Jun 06 09:48:03 crc kubenswrapper[4911]: I0606 09:48:03.349305 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-c6d8s" event={"ID":"3015c280-3c09-4b58-815b-30e61255a570","Type":"ContainerStarted","Data":"22752fadad8d52858e8cc975435524ebc9c626aec91fe6005971de20b0ff0cf1"} Jun 06 09:48:03 crc kubenswrapper[4911]: I0606 09:48:03.349618 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-c6d8s" event={"ID":"3015c280-3c09-4b58-815b-30e61255a570","Type":"ContainerStarted","Data":"07825b60e582b36fec7605c68a8846e9cce472a316f7351931f57e953f9d9275"} Jun 06 09:48:03 crc kubenswrapper[4911]: I0606 09:48:03.376170 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-c6d8s" podStartSLOduration=1.376143566 podStartE2EDuration="1.376143566s" podCreationTimestamp="2025-06-06 09:48:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:48:03.369887994 +0000 UTC m=+2094.645313537" watchObservedRunningTime="2025-06-06 09:48:03.376143566 +0000 UTC m=+2094.651569109" Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.426193 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-c6d8s"] Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.427407 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-c6d8s" podUID="3015c280-3c09-4b58-815b-30e61255a570" containerName="container-00" containerID="cri-o://22752fadad8d52858e8cc975435524ebc9c626aec91fe6005971de20b0ff0cf1" gracePeriod=2 Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.435267 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-c6d8s"] Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.458841 4911 generic.go:334] "Generic (PLEG): container finished" podID="3015c280-3c09-4b58-815b-30e61255a570" containerID="22752fadad8d52858e8cc975435524ebc9c626aec91fe6005971de20b0ff0cf1" exitCode=0 Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.536738 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-c6d8s" Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.678672 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjhtg\" (UniqueName: \"kubernetes.io/projected/3015c280-3c09-4b58-815b-30e61255a570-kube-api-access-hjhtg\") pod \"3015c280-3c09-4b58-815b-30e61255a570\" (UID: \"3015c280-3c09-4b58-815b-30e61255a570\") " Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.678801 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3015c280-3c09-4b58-815b-30e61255a570-host\") pod \"3015c280-3c09-4b58-815b-30e61255a570\" (UID: \"3015c280-3c09-4b58-815b-30e61255a570\") " Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.678956 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3015c280-3c09-4b58-815b-30e61255a570-host" (OuterVolumeSpecName: "host") pod "3015c280-3c09-4b58-815b-30e61255a570" (UID: "3015c280-3c09-4b58-815b-30e61255a570"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.679423 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3015c280-3c09-4b58-815b-30e61255a570-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.685422 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3015c280-3c09-4b58-815b-30e61255a570-kube-api-access-hjhtg" (OuterVolumeSpecName: "kube-api-access-hjhtg") pod "3015c280-3c09-4b58-815b-30e61255a570" (UID: "3015c280-3c09-4b58-815b-30e61255a570"). InnerVolumeSpecName "kube-api-access-hjhtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.781867 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjhtg\" (UniqueName: \"kubernetes.io/projected/3015c280-3c09-4b58-815b-30e61255a570-kube-api-access-hjhtg\") on node \"crc\" DevicePath \"\"" Jun 06 09:48:13 crc kubenswrapper[4911]: I0606 09:48:13.960477 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3015c280-3c09-4b58-815b-30e61255a570" path="/var/lib/kubelet/pods/3015c280-3c09-4b58-815b-30e61255a570/volumes" Jun 06 09:48:14 crc kubenswrapper[4911]: I0606 09:48:14.468347 4911 scope.go:117] "RemoveContainer" containerID="22752fadad8d52858e8cc975435524ebc9c626aec91fe6005971de20b0ff0cf1" Jun 06 09:48:14 crc kubenswrapper[4911]: I0606 09:48:14.468398 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-c6d8s" Jun 06 09:48:54 crc kubenswrapper[4911]: I0606 09:48:54.300216 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:48:54 crc kubenswrapper[4911]: I0606 09:48:54.300819 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:49:01 crc kubenswrapper[4911]: I0606 09:49:01.878705 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-bkvpl"] Jun 06 09:49:01 crc kubenswrapper[4911]: E0606 09:49:01.879860 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3015c280-3c09-4b58-815b-30e61255a570" containerName="container-00" Jun 06 09:49:01 crc kubenswrapper[4911]: I0606 09:49:01.879875 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3015c280-3c09-4b58-815b-30e61255a570" containerName="container-00" Jun 06 09:49:01 crc kubenswrapper[4911]: I0606 09:49:01.880115 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3015c280-3c09-4b58-815b-30e61255a570" containerName="container-00" Jun 06 09:49:01 crc kubenswrapper[4911]: I0606 09:49:01.880814 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-bkvpl" Jun 06 09:49:02 crc kubenswrapper[4911]: I0606 09:49:02.040652 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfqhv\" (UniqueName: \"kubernetes.io/projected/50bb7dd6-0859-4303-9dad-764a79a59792-kube-api-access-qfqhv\") pod \"crc-debug-bkvpl\" (UID: \"50bb7dd6-0859-4303-9dad-764a79a59792\") " pod="openstack/crc-debug-bkvpl" Jun 06 09:49:02 crc kubenswrapper[4911]: I0606 09:49:02.040827 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50bb7dd6-0859-4303-9dad-764a79a59792-host\") pod \"crc-debug-bkvpl\" (UID: \"50bb7dd6-0859-4303-9dad-764a79a59792\") " pod="openstack/crc-debug-bkvpl" Jun 06 09:49:02 crc kubenswrapper[4911]: I0606 09:49:02.142533 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfqhv\" (UniqueName: \"kubernetes.io/projected/50bb7dd6-0859-4303-9dad-764a79a59792-kube-api-access-qfqhv\") pod \"crc-debug-bkvpl\" (UID: \"50bb7dd6-0859-4303-9dad-764a79a59792\") " pod="openstack/crc-debug-bkvpl" Jun 06 09:49:02 crc kubenswrapper[4911]: I0606 09:49:02.142655 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50bb7dd6-0859-4303-9dad-764a79a59792-host\") pod \"crc-debug-bkvpl\" (UID: \"50bb7dd6-0859-4303-9dad-764a79a59792\") " pod="openstack/crc-debug-bkvpl" Jun 06 09:49:02 crc kubenswrapper[4911]: I0606 09:49:02.142792 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50bb7dd6-0859-4303-9dad-764a79a59792-host\") pod \"crc-debug-bkvpl\" (UID: \"50bb7dd6-0859-4303-9dad-764a79a59792\") " pod="openstack/crc-debug-bkvpl" Jun 06 09:49:02 crc kubenswrapper[4911]: I0606 09:49:02.164625 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfqhv\" (UniqueName: \"kubernetes.io/projected/50bb7dd6-0859-4303-9dad-764a79a59792-kube-api-access-qfqhv\") pod \"crc-debug-bkvpl\" (UID: \"50bb7dd6-0859-4303-9dad-764a79a59792\") " pod="openstack/crc-debug-bkvpl" Jun 06 09:49:02 crc kubenswrapper[4911]: I0606 09:49:02.205398 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-bkvpl" Jun 06 09:49:02 crc kubenswrapper[4911]: I0606 09:49:02.981961 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-bkvpl" event={"ID":"50bb7dd6-0859-4303-9dad-764a79a59792","Type":"ContainerStarted","Data":"794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6"} Jun 06 09:49:02 crc kubenswrapper[4911]: I0606 09:49:02.982629 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-bkvpl" event={"ID":"50bb7dd6-0859-4303-9dad-764a79a59792","Type":"ContainerStarted","Data":"f1a04455cab2e5f169e72fe3933659a6a716f2fc22385f26ce4f187692430c18"} Jun 06 09:49:03 crc kubenswrapper[4911]: I0606 09:49:03.002417 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-bkvpl" podStartSLOduration=2.002394766 podStartE2EDuration="2.002394766s" podCreationTimestamp="2025-06-06 09:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:49:02.996871532 +0000 UTC m=+2154.272297075" watchObservedRunningTime="2025-06-06 09:49:03.002394766 +0000 UTC m=+2154.277820309" Jun 06 09:49:12 crc kubenswrapper[4911]: I0606 09:49:12.834822 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-bkvpl"] Jun 06 09:49:12 crc kubenswrapper[4911]: I0606 09:49:12.835730 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-bkvpl" podUID="50bb7dd6-0859-4303-9dad-764a79a59792" containerName="container-00" containerID="cri-o://794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6" gracePeriod=2 Jun 06 09:49:12 crc kubenswrapper[4911]: I0606 09:49:12.853123 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-bkvpl"] Jun 06 09:49:12 crc kubenswrapper[4911]: I0606 09:49:12.949696 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-bkvpl" Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.071580 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50bb7dd6-0859-4303-9dad-764a79a59792-host\") pod \"50bb7dd6-0859-4303-9dad-764a79a59792\" (UID: \"50bb7dd6-0859-4303-9dad-764a79a59792\") " Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.071711 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50bb7dd6-0859-4303-9dad-764a79a59792-host" (OuterVolumeSpecName: "host") pod "50bb7dd6-0859-4303-9dad-764a79a59792" (UID: "50bb7dd6-0859-4303-9dad-764a79a59792"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.072027 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfqhv\" (UniqueName: \"kubernetes.io/projected/50bb7dd6-0859-4303-9dad-764a79a59792-kube-api-access-qfqhv\") pod \"50bb7dd6-0859-4303-9dad-764a79a59792\" (UID: \"50bb7dd6-0859-4303-9dad-764a79a59792\") " Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.073985 4911 generic.go:334] "Generic (PLEG): container finished" podID="50bb7dd6-0859-4303-9dad-764a79a59792" containerID="794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6" exitCode=0 Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.074063 4911 scope.go:117] "RemoveContainer" containerID="794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6" Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.074116 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-bkvpl" Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.074875 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50bb7dd6-0859-4303-9dad-764a79a59792-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.079300 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50bb7dd6-0859-4303-9dad-764a79a59792-kube-api-access-qfqhv" (OuterVolumeSpecName: "kube-api-access-qfqhv") pod "50bb7dd6-0859-4303-9dad-764a79a59792" (UID: "50bb7dd6-0859-4303-9dad-764a79a59792"). InnerVolumeSpecName "kube-api-access-qfqhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.149056 4911 scope.go:117] "RemoveContainer" containerID="794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6" Jun 06 09:49:13 crc kubenswrapper[4911]: E0606 09:49:13.149743 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6\": container with ID starting with 794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6 not found: ID does not exist" containerID="794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6" Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.149836 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6"} err="failed to get container status \"794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6\": rpc error: code = NotFound desc = could not find container \"794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6\": container with ID starting with 794dc2b473ff8ca01511dcf6913d98c7565e673e994b010242c4944b378b35a6 not found: ID does not exist" Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.178082 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfqhv\" (UniqueName: \"kubernetes.io/projected/50bb7dd6-0859-4303-9dad-764a79a59792-kube-api-access-qfqhv\") on node \"crc\" DevicePath \"\"" Jun 06 09:49:13 crc kubenswrapper[4911]: I0606 09:49:13.961966 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50bb7dd6-0859-4303-9dad-764a79a59792" path="/var/lib/kubelet/pods/50bb7dd6-0859-4303-9dad-764a79a59792/volumes" Jun 06 09:49:24 crc 
kubenswrapper[4911]: I0606 09:49:24.300894 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:49:24 crc kubenswrapper[4911]: I0606 09:49:24.301510 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:49:54 crc kubenswrapper[4911]: I0606 09:49:54.300262 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:49:54 crc kubenswrapper[4911]: I0606 09:49:54.300768 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:49:54 crc kubenswrapper[4911]: I0606 09:49:54.300825 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:49:54 crc kubenswrapper[4911]: I0606 09:49:54.301628 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 09:49:54 crc kubenswrapper[4911]: I0606 09:49:54.301747 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" gracePeriod=600 Jun 06 09:49:54 crc kubenswrapper[4911]: E0606 09:49:54.425381 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:49:54 crc kubenswrapper[4911]: I0606 09:49:54.477765 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" exitCode=0 Jun 06 09:49:54 crc kubenswrapper[4911]: I0606 09:49:54.477824 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" 
event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81"} Jun 06 09:49:54 crc kubenswrapper[4911]: I0606 09:49:54.477862 4911 scope.go:117] "RemoveContainer" containerID="90c33cab05764d4260fff87e3c1193421c452a61479539125aaaae13a635ce01" Jun 06 09:49:54 crc kubenswrapper[4911]: I0606 09:49:54.478669 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:49:54 crc kubenswrapper[4911]: E0606 09:49:54.478968 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.160291 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-hvfbd"] Jun 06 09:50:02 crc kubenswrapper[4911]: E0606 09:50:02.162226 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50bb7dd6-0859-4303-9dad-764a79a59792" containerName="container-00" Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.162252 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="50bb7dd6-0859-4303-9dad-764a79a59792" containerName="container-00" Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.162562 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="50bb7dd6-0859-4303-9dad-764a79a59792" containerName="container-00" Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.163815 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-hvfbd" Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.245079 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9aae832f-1bf3-4956-9855-0489143aa20f-host\") pod \"crc-debug-hvfbd\" (UID: \"9aae832f-1bf3-4956-9855-0489143aa20f\") " pod="openstack/crc-debug-hvfbd" Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.245326 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q68x\" (UniqueName: \"kubernetes.io/projected/9aae832f-1bf3-4956-9855-0489143aa20f-kube-api-access-7q68x\") pod \"crc-debug-hvfbd\" (UID: \"9aae832f-1bf3-4956-9855-0489143aa20f\") " pod="openstack/crc-debug-hvfbd" Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.348412 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9aae832f-1bf3-4956-9855-0489143aa20f-host\") pod \"crc-debug-hvfbd\" (UID: \"9aae832f-1bf3-4956-9855-0489143aa20f\") " pod="openstack/crc-debug-hvfbd" Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.348574 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9aae832f-1bf3-4956-9855-0489143aa20f-host\") pod \"crc-debug-hvfbd\" (UID: \"9aae832f-1bf3-4956-9855-0489143aa20f\") " pod="openstack/crc-debug-hvfbd" Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.348605 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q68x\" (UniqueName: \"kubernetes.io/projected/9aae832f-1bf3-4956-9855-0489143aa20f-kube-api-access-7q68x\") pod \"crc-debug-hvfbd\" (UID: \"9aae832f-1bf3-4956-9855-0489143aa20f\") " pod="openstack/crc-debug-hvfbd" Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.371317 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q68x\" (UniqueName: \"kubernetes.io/projected/9aae832f-1bf3-4956-9855-0489143aa20f-kube-api-access-7q68x\") pod \"crc-debug-hvfbd\" (UID: \"9aae832f-1bf3-4956-9855-0489143aa20f\") " pod="openstack/crc-debug-hvfbd" Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.486249 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-hvfbd" Jun 06 09:50:02 crc kubenswrapper[4911]: W0606 09:50:02.524629 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9aae832f_1bf3_4956_9855_0489143aa20f.slice/crio-a2ef2e114015b91b4c09ecd3a79c68e40f9001bd5959b2237936f82ffbed6ea8 WatchSource:0}: Error finding container a2ef2e114015b91b4c09ecd3a79c68e40f9001bd5959b2237936f82ffbed6ea8: Status 404 returned error can't find the container with id a2ef2e114015b91b4c09ecd3a79c68e40f9001bd5959b2237936f82ffbed6ea8 Jun 06 09:50:02 crc kubenswrapper[4911]: I0606 09:50:02.582196 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-hvfbd" event={"ID":"9aae832f-1bf3-4956-9855-0489143aa20f","Type":"ContainerStarted","Data":"a2ef2e114015b91b4c09ecd3a79c68e40f9001bd5959b2237936f82ffbed6ea8"} Jun 06 09:50:03 crc kubenswrapper[4911]: I0606 09:50:03.591645 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-hvfbd" event={"ID":"9aae832f-1bf3-4956-9855-0489143aa20f","Type":"ContainerStarted","Data":"17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2"} Jun 06 09:50:03 crc kubenswrapper[4911]: I0606 09:50:03.611727 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-hvfbd" podStartSLOduration=1.611710384 podStartE2EDuration="1.611710384s" podCreationTimestamp="2025-06-06 09:50:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:50:03.6061751 +0000 UTC m=+2214.881600663" watchObservedRunningTime="2025-06-06 09:50:03.611710384 +0000 UTC m=+2214.887135927" Jun 06 09:50:09 crc kubenswrapper[4911]: I0606 09:50:09.954746 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:50:09 crc kubenswrapper[4911]: E0606 09:50:09.955572 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.089568 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-hvfbd"] Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.091152 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-hvfbd" podUID="9aae832f-1bf3-4956-9855-0489143aa20f" containerName="container-00" containerID="cri-o://17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2" gracePeriod=2 Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.104269 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-hvfbd"] Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.189527 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-hvfbd" Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.279006 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7q68x\" (UniqueName: \"kubernetes.io/projected/9aae832f-1bf3-4956-9855-0489143aa20f-kube-api-access-7q68x\") pod \"9aae832f-1bf3-4956-9855-0489143aa20f\" (UID: \"9aae832f-1bf3-4956-9855-0489143aa20f\") " Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.279115 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9aae832f-1bf3-4956-9855-0489143aa20f-host\") pod \"9aae832f-1bf3-4956-9855-0489143aa20f\" (UID: \"9aae832f-1bf3-4956-9855-0489143aa20f\") " Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.279460 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9aae832f-1bf3-4956-9855-0489143aa20f-host" (OuterVolumeSpecName: "host") pod "9aae832f-1bf3-4956-9855-0489143aa20f" (UID: "9aae832f-1bf3-4956-9855-0489143aa20f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.279747 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9aae832f-1bf3-4956-9855-0489143aa20f-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.284429 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aae832f-1bf3-4956-9855-0489143aa20f-kube-api-access-7q68x" (OuterVolumeSpecName: "kube-api-access-7q68x") pod "9aae832f-1bf3-4956-9855-0489143aa20f" (UID: "9aae832f-1bf3-4956-9855-0489143aa20f"). InnerVolumeSpecName "kube-api-access-7q68x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.381217 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7q68x\" (UniqueName: \"kubernetes.io/projected/9aae832f-1bf3-4956-9855-0489143aa20f-kube-api-access-7q68x\") on node \"crc\" DevicePath \"\"" Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.675473 4911 generic.go:334] "Generic (PLEG): container finished" podID="9aae832f-1bf3-4956-9855-0489143aa20f" containerID="17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2" exitCode=0 Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.675538 4911 scope.go:117] "RemoveContainer" containerID="17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2" Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.675584 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-hvfbd" Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.698130 4911 scope.go:117] "RemoveContainer" containerID="17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2" Jun 06 09:50:13 crc kubenswrapper[4911]: E0606 09:50:13.698582 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2\": container with ID starting with 17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2 not found: ID does not exist" containerID="17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2" Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.698622 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2"} err="failed to get container status \"17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2\": rpc error: code = NotFound desc = could not find container \"17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2\": container with ID starting with 17376625895dd8a5cd8289146c6d6262812419cb9c57962b3baf04ee2896f8f2 not found: ID does not exist" Jun 06 09:50:13 crc kubenswrapper[4911]: I0606 09:50:13.959926 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9aae832f-1bf3-4956-9855-0489143aa20f" path="/var/lib/kubelet/pods/9aae832f-1bf3-4956-9855-0489143aa20f/volumes" Jun 06 09:50:22 crc kubenswrapper[4911]: I0606 09:50:22.948470 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:50:22 crc kubenswrapper[4911]: E0606 09:50:22.949377 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:50:33 crc kubenswrapper[4911]: I0606 09:50:33.947824 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:50:33 crc kubenswrapper[4911]: E0606 09:50:33.948842 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:50:46 crc kubenswrapper[4911]: I0606 09:50:46.948338 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:50:46 crc kubenswrapper[4911]: E0606 09:50:46.949191 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" 
podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:50:58 crc kubenswrapper[4911]: I0606 09:50:58.948649 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:50:58 crc kubenswrapper[4911]: E0606 09:50:58.949527 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:51:01 crc kubenswrapper[4911]: I0606 09:51:01.487252 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-bwmct"] Jun 06 09:51:01 crc kubenswrapper[4911]: E0606 09:51:01.488008 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aae832f-1bf3-4956-9855-0489143aa20f" containerName="container-00" Jun 06 09:51:01 crc kubenswrapper[4911]: I0606 09:51:01.488020 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aae832f-1bf3-4956-9855-0489143aa20f" containerName="container-00" Jun 06 09:51:01 crc kubenswrapper[4911]: I0606 09:51:01.488252 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aae832f-1bf3-4956-9855-0489143aa20f" containerName="container-00" Jun 06 09:51:01 crc kubenswrapper[4911]: I0606 09:51:01.488879 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-bwmct" Jun 06 09:51:01 crc kubenswrapper[4911]: I0606 09:51:01.560866 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-host\") pod \"crc-debug-bwmct\" (UID: \"f1707f81-62cd-4b17-a203-3fc02ccb9c7e\") " pod="openstack/crc-debug-bwmct" Jun 06 09:51:01 crc kubenswrapper[4911]: I0606 09:51:01.561020 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf7tz\" (UniqueName: \"kubernetes.io/projected/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-kube-api-access-gf7tz\") pod \"crc-debug-bwmct\" (UID: \"f1707f81-62cd-4b17-a203-3fc02ccb9c7e\") " pod="openstack/crc-debug-bwmct" Jun 06 09:51:01 crc kubenswrapper[4911]: I0606 09:51:01.662718 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-host\") pod \"crc-debug-bwmct\" (UID: \"f1707f81-62cd-4b17-a203-3fc02ccb9c7e\") " pod="openstack/crc-debug-bwmct" Jun 06 09:51:01 crc kubenswrapper[4911]: I0606 09:51:01.662832 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-host\") pod \"crc-debug-bwmct\" (UID: \"f1707f81-62cd-4b17-a203-3fc02ccb9c7e\") " pod="openstack/crc-debug-bwmct" Jun 06 09:51:01 crc kubenswrapper[4911]: I0606 09:51:01.662868 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf7tz\" (UniqueName: \"kubernetes.io/projected/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-kube-api-access-gf7tz\") pod \"crc-debug-bwmct\" (UID: \"f1707f81-62cd-4b17-a203-3fc02ccb9c7e\") " pod="openstack/crc-debug-bwmct" Jun 06 09:51:01 crc kubenswrapper[4911]: I0606 09:51:01.687030 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gf7tz\" (UniqueName: \"kubernetes.io/projected/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-kube-api-access-gf7tz\") pod \"crc-debug-bwmct\" (UID: \"f1707f81-62cd-4b17-a203-3fc02ccb9c7e\") " pod="openstack/crc-debug-bwmct" Jun 06 09:51:01 crc kubenswrapper[4911]: I0606 09:51:01.811566 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-bwmct" Jun 06 09:51:02 crc kubenswrapper[4911]: I0606 09:51:02.210715 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-bwmct" event={"ID":"f1707f81-62cd-4b17-a203-3fc02ccb9c7e","Type":"ContainerStarted","Data":"2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a"} Jun 06 09:51:02 crc kubenswrapper[4911]: I0606 09:51:02.211412 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-bwmct" event={"ID":"f1707f81-62cd-4b17-a203-3fc02ccb9c7e","Type":"ContainerStarted","Data":"f0bd25fb1646292063be79e241eda5c3fd39e3f39d30c27b89316fea322652c8"} Jun 06 09:51:02 crc kubenswrapper[4911]: I0606 09:51:02.225422 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-bwmct" podStartSLOduration=1.225401752 podStartE2EDuration="1.225401752s" podCreationTimestamp="2025-06-06 09:51:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:51:02.222979339 +0000 UTC m=+2273.498404902" watchObservedRunningTime="2025-06-06 09:51:02.225401752 +0000 UTC m=+2273.500827295" Jun 06 09:51:10 crc kubenswrapper[4911]: I0606 09:51:10.948677 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:51:10 crc kubenswrapper[4911]: E0606 09:51:10.949540 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:51:12 crc kubenswrapper[4911]: I0606 09:51:12.454211 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-bwmct"] Jun 06 09:51:12 crc kubenswrapper[4911]: I0606 09:51:12.454649 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-bwmct" podUID="f1707f81-62cd-4b17-a203-3fc02ccb9c7e" containerName="container-00" containerID="cri-o://2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a" gracePeriod=2 Jun 06 09:51:12 crc kubenswrapper[4911]: I0606 09:51:12.463247 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-bwmct"] Jun 06 09:51:12 crc kubenswrapper[4911]: I0606 09:51:12.581479 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-bwmct" Jun 06 09:51:12 crc kubenswrapper[4911]: I0606 09:51:12.688839 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf7tz\" (UniqueName: \"kubernetes.io/projected/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-kube-api-access-gf7tz\") pod \"f1707f81-62cd-4b17-a203-3fc02ccb9c7e\" (UID: \"f1707f81-62cd-4b17-a203-3fc02ccb9c7e\") " Jun 06 09:51:12 crc kubenswrapper[4911]: I0606 09:51:12.689168 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-host\") pod \"f1707f81-62cd-4b17-a203-3fc02ccb9c7e\" (UID: \"f1707f81-62cd-4b17-a203-3fc02ccb9c7e\") " Jun 06 09:51:12 crc kubenswrapper[4911]: I0606 09:51:12.689329 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-host" (OuterVolumeSpecName: "host") pod "f1707f81-62cd-4b17-a203-3fc02ccb9c7e" (UID: "f1707f81-62cd-4b17-a203-3fc02ccb9c7e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:51:12 crc kubenswrapper[4911]: I0606 09:51:12.689767 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:51:12 crc kubenswrapper[4911]: I0606 09:51:12.698067 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-kube-api-access-gf7tz" (OuterVolumeSpecName: "kube-api-access-gf7tz") pod "f1707f81-62cd-4b17-a203-3fc02ccb9c7e" (UID: "f1707f81-62cd-4b17-a203-3fc02ccb9c7e"). InnerVolumeSpecName "kube-api-access-gf7tz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:51:12 crc kubenswrapper[4911]: I0606 09:51:12.791155 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf7tz\" (UniqueName: \"kubernetes.io/projected/f1707f81-62cd-4b17-a203-3fc02ccb9c7e-kube-api-access-gf7tz\") on node \"crc\" DevicePath \"\"" Jun 06 09:51:13 crc kubenswrapper[4911]: I0606 09:51:13.299956 4911 generic.go:334] "Generic (PLEG): container finished" podID="f1707f81-62cd-4b17-a203-3fc02ccb9c7e" containerID="2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a" exitCode=0 Jun 06 09:51:13 crc kubenswrapper[4911]: I0606 09:51:13.300028 4911 scope.go:117] "RemoveContainer" containerID="2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a" Jun 06 09:51:13 crc kubenswrapper[4911]: I0606 09:51:13.300072 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-bwmct" Jun 06 09:51:13 crc kubenswrapper[4911]: I0606 09:51:13.318287 4911 scope.go:117] "RemoveContainer" containerID="2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a" Jun 06 09:51:13 crc kubenswrapper[4911]: E0606 09:51:13.318775 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a\": container with ID starting with 2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a not found: ID does not exist" containerID="2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a" Jun 06 09:51:13 crc kubenswrapper[4911]: I0606 09:51:13.318822 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a"} err="failed to get container status \"2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a\": rpc error: code = NotFound desc = could not find container \"2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a\": container with ID starting with 2b791935fb4f728c4ea1a0f76bdb1603b40af955b89caff43f33039c8532a49a not found: ID does not exist" Jun 06 09:51:13 crc kubenswrapper[4911]: I0606 09:51:13.959727 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1707f81-62cd-4b17-a203-3fc02ccb9c7e" path="/var/lib/kubelet/pods/f1707f81-62cd-4b17-a203-3fc02ccb9c7e/volumes" Jun 06 09:51:18 crc kubenswrapper[4911]: I0606 09:51:18.343720 4911 generic.go:334] "Generic (PLEG): container finished" podID="7bf0ed24-49d2-403e-afe0-25483f916433" containerID="1973bce1e62909be9399c84245221a51848a89f4659894a1b0c290cde3d38f37" exitCode=0 Jun 06 09:51:18 crc kubenswrapper[4911]: I0606 09:51:18.344267 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" event={"ID":"7bf0ed24-49d2-403e-afe0-25483f916433","Type":"ContainerDied","Data":"1973bce1e62909be9399c84245221a51848a89f4659894a1b0c290cde3d38f37"} Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.787737 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.828852 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-combined-ca-bundle\") pod \"7bf0ed24-49d2-403e-afe0-25483f916433\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.829005 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-secret-0\") pod \"7bf0ed24-49d2-403e-afe0-25483f916433\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.829063 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8c7cb\" (UniqueName: \"kubernetes.io/projected/7bf0ed24-49d2-403e-afe0-25483f916433-kube-api-access-8c7cb\") pod \"7bf0ed24-49d2-403e-afe0-25483f916433\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.829161 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-ssh-key\") pod \"7bf0ed24-49d2-403e-afe0-25483f916433\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.829207 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-inventory\") pod \"7bf0ed24-49d2-403e-afe0-25483f916433\" (UID: \"7bf0ed24-49d2-403e-afe0-25483f916433\") " Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.836000 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bf0ed24-49d2-403e-afe0-25483f916433-kube-api-access-8c7cb" (OuterVolumeSpecName: "kube-api-access-8c7cb") pod "7bf0ed24-49d2-403e-afe0-25483f916433" (UID: "7bf0ed24-49d2-403e-afe0-25483f916433"). InnerVolumeSpecName "kube-api-access-8c7cb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.836142 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "7bf0ed24-49d2-403e-afe0-25483f916433" (UID: "7bf0ed24-49d2-403e-afe0-25483f916433"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.860138 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "7bf0ed24-49d2-403e-afe0-25483f916433" (UID: "7bf0ed24-49d2-403e-afe0-25483f916433"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.861735 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7bf0ed24-49d2-403e-afe0-25483f916433" (UID: "7bf0ed24-49d2-403e-afe0-25483f916433"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.862106 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-inventory" (OuterVolumeSpecName: "inventory") pod "7bf0ed24-49d2-403e-afe0-25483f916433" (UID: "7bf0ed24-49d2-403e-afe0-25483f916433"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.931843 4911 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.931892 4911 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.931904 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8c7cb\" (UniqueName: \"kubernetes.io/projected/7bf0ed24-49d2-403e-afe0-25483f916433-kube-api-access-8c7cb\") on node \"crc\" DevicePath \"\"" Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.931913 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:51:19 crc kubenswrapper[4911]: I0606 09:51:19.931921 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7bf0ed24-49d2-403e-afe0-25483f916433-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.362157 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" event={"ID":"7bf0ed24-49d2-403e-afe0-25483f916433","Type":"ContainerDied","Data":"5e4ffb4fb5d76a6b53c3b43648593f4040c225c4ed4d3363b55b680e325d8ab0"} Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.362468 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e4ffb4fb5d76a6b53c3b43648593f4040c225c4ed4d3363b55b680e325d8ab0" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.362259 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-csqcw" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.463847 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s"] Jun 06 09:51:20 crc kubenswrapper[4911]: E0606 09:51:20.464526 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1707f81-62cd-4b17-a203-3fc02ccb9c7e" containerName="container-00" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.464558 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1707f81-62cd-4b17-a203-3fc02ccb9c7e" containerName="container-00" Jun 06 09:51:20 crc kubenswrapper[4911]: E0606 09:51:20.464581 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bf0ed24-49d2-403e-afe0-25483f916433" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.464591 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bf0ed24-49d2-403e-afe0-25483f916433" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.464813 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bf0ed24-49d2-403e-afe0-25483f916433" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.464848 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1707f81-62cd-4b17-a203-3fc02ccb9c7e" containerName="container-00" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.465769 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.468124 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.468121 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.468439 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.468783 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.468846 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.468964 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.469175 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.472849 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s"] Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.545446 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skzrd\" (UniqueName: \"kubernetes.io/projected/8af0e0c9-e20f-479b-8622-49565f84eb2b-kube-api-access-skzrd\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.545523 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.545721 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.545788 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.545851 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.546109 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.546277 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.546416 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.546508 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-0\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.649356 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.649430 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.649472 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.649612 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.649787 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.649851 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.649918 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.650057 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skzrd\" (UniqueName: \"kubernetes.io/projected/8af0e0c9-e20f-479b-8622-49565f84eb2b-kube-api-access-skzrd\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" 
(UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.650159 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.652636 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.653523 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.653883 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.654006 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.656280 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.656414 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.656660 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc 
kubenswrapper[4911]: I0606 09:51:20.659933 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.672441 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skzrd\" (UniqueName: \"kubernetes.io/projected/8af0e0c9-e20f-479b-8622-49565f84eb2b-kube-api-access-skzrd\") pod \"nova-edpm-deployment-openstack-edpm-ipam-w894s\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:20 crc kubenswrapper[4911]: I0606 09:51:20.787908 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:51:21 crc kubenswrapper[4911]: I0606 09:51:21.303521 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s"] Jun 06 09:51:21 crc kubenswrapper[4911]: I0606 09:51:21.307792 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 09:51:21 crc kubenswrapper[4911]: I0606 09:51:21.378797 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" event={"ID":"8af0e0c9-e20f-479b-8622-49565f84eb2b","Type":"ContainerStarted","Data":"e62aa199ff5cbdc8f9ec48c2bacadde9f80e626916ac7825d6012bc25699ccd9"} Jun 06 09:51:22 crc kubenswrapper[4911]: I0606 09:51:22.390429 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" event={"ID":"8af0e0c9-e20f-479b-8622-49565f84eb2b","Type":"ContainerStarted","Data":"18b96e2892888d77cf6b9f87b7de8ef204c776c122f2150e04ee15d8523094a3"} Jun 06 09:51:22 crc kubenswrapper[4911]: I0606 09:51:22.413466 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" podStartSLOduration=1.997529309 podStartE2EDuration="2.413448933s" podCreationTimestamp="2025-06-06 09:51:20 +0000 UTC" firstStartedPulling="2025-06-06 09:51:21.307602377 +0000 UTC m=+2292.583027920" lastFinishedPulling="2025-06-06 09:51:21.723522001 +0000 UTC m=+2292.998947544" observedRunningTime="2025-06-06 09:51:22.409943642 +0000 UTC m=+2293.685369195" watchObservedRunningTime="2025-06-06 09:51:22.413448933 +0000 UTC m=+2293.688874476" Jun 06 09:51:22 crc kubenswrapper[4911]: I0606 09:51:22.949440 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:51:22 crc kubenswrapper[4911]: E0606 09:51:22.949728 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:51:37 crc kubenswrapper[4911]: I0606 09:51:37.949508 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:51:37 crc 
kubenswrapper[4911]: E0606 09:51:37.950729 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:51:52 crc kubenswrapper[4911]: I0606 09:51:52.948536 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:51:52 crc kubenswrapper[4911]: E0606 09:51:52.949255 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:52:01 crc kubenswrapper[4911]: I0606 09:52:01.841803 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-8jjsh"] Jun 06 09:52:01 crc kubenswrapper[4911]: I0606 09:52:01.843671 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-8jjsh" Jun 06 09:52:01 crc kubenswrapper[4911]: I0606 09:52:01.984109 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svmnb\" (UniqueName: \"kubernetes.io/projected/c51258d5-70eb-4d04-99ee-d6fde808f365-kube-api-access-svmnb\") pod \"crc-debug-8jjsh\" (UID: \"c51258d5-70eb-4d04-99ee-d6fde808f365\") " pod="openstack/crc-debug-8jjsh" Jun 06 09:52:01 crc kubenswrapper[4911]: I0606 09:52:01.984991 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c51258d5-70eb-4d04-99ee-d6fde808f365-host\") pod \"crc-debug-8jjsh\" (UID: \"c51258d5-70eb-4d04-99ee-d6fde808f365\") " pod="openstack/crc-debug-8jjsh" Jun 06 09:52:02 crc kubenswrapper[4911]: I0606 09:52:02.087788 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c51258d5-70eb-4d04-99ee-d6fde808f365-host\") pod \"crc-debug-8jjsh\" (UID: \"c51258d5-70eb-4d04-99ee-d6fde808f365\") " pod="openstack/crc-debug-8jjsh" Jun 06 09:52:02 crc kubenswrapper[4911]: I0606 09:52:02.087950 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c51258d5-70eb-4d04-99ee-d6fde808f365-host\") pod \"crc-debug-8jjsh\" (UID: \"c51258d5-70eb-4d04-99ee-d6fde808f365\") " pod="openstack/crc-debug-8jjsh" Jun 06 09:52:02 crc kubenswrapper[4911]: I0606 09:52:02.087969 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svmnb\" (UniqueName: \"kubernetes.io/projected/c51258d5-70eb-4d04-99ee-d6fde808f365-kube-api-access-svmnb\") pod \"crc-debug-8jjsh\" (UID: \"c51258d5-70eb-4d04-99ee-d6fde808f365\") " pod="openstack/crc-debug-8jjsh" Jun 06 09:52:02 crc kubenswrapper[4911]: I0606 09:52:02.108873 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svmnb\" (UniqueName: \"kubernetes.io/projected/c51258d5-70eb-4d04-99ee-d6fde808f365-kube-api-access-svmnb\") pod 
\"crc-debug-8jjsh\" (UID: \"c51258d5-70eb-4d04-99ee-d6fde808f365\") " pod="openstack/crc-debug-8jjsh" Jun 06 09:52:02 crc kubenswrapper[4911]: I0606 09:52:02.165027 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-8jjsh" Jun 06 09:52:02 crc kubenswrapper[4911]: I0606 09:52:02.776880 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-8jjsh" event={"ID":"c51258d5-70eb-4d04-99ee-d6fde808f365","Type":"ContainerStarted","Data":"685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed"} Jun 06 09:52:02 crc kubenswrapper[4911]: I0606 09:52:02.777509 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-8jjsh" event={"ID":"c51258d5-70eb-4d04-99ee-d6fde808f365","Type":"ContainerStarted","Data":"1d0d7260268a7f3515a4928be341ece3e7b5951b2be921291bba56dab82cbafb"} Jun 06 09:52:02 crc kubenswrapper[4911]: I0606 09:52:02.797972 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-8jjsh" podStartSLOduration=1.7979478850000001 podStartE2EDuration="1.797947885s" podCreationTimestamp="2025-06-06 09:52:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:52:02.794212377 +0000 UTC m=+2334.069637920" watchObservedRunningTime="2025-06-06 09:52:02.797947885 +0000 UTC m=+2334.073373428" Jun 06 09:52:03 crc kubenswrapper[4911]: I0606 09:52:03.949838 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:52:03 crc kubenswrapper[4911]: E0606 09:52:03.950407 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.017793 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7nbmv"] Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.021075 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.040622 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7nbmv"] Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.206979 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-utilities\") pod \"community-operators-7nbmv\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.207411 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5ktz\" (UniqueName: \"kubernetes.io/projected/3c5321d7-5d48-4707-8b3a-e930dcc3440b-kube-api-access-k5ktz\") pod \"community-operators-7nbmv\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.207900 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-catalog-content\") pod \"community-operators-7nbmv\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.310964 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-utilities\") pod \"community-operators-7nbmv\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.311538 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5ktz\" (UniqueName: \"kubernetes.io/projected/3c5321d7-5d48-4707-8b3a-e930dcc3440b-kube-api-access-k5ktz\") pod \"community-operators-7nbmv\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.311628 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-utilities\") pod \"community-operators-7nbmv\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.311944 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-catalog-content\") pod \"community-operators-7nbmv\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.312475 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-catalog-content\") pod \"community-operators-7nbmv\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.336261 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-k5ktz\" (UniqueName: \"kubernetes.io/projected/3c5321d7-5d48-4707-8b3a-e930dcc3440b-kube-api-access-k5ktz\") pod \"community-operators-7nbmv\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.349262 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.657678 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7nbmv"] Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.793166 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-8jjsh"] Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.793452 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-8jjsh" podUID="c51258d5-70eb-4d04-99ee-d6fde808f365" containerName="container-00" containerID="cri-o://685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed" gracePeriod=2 Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.801303 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-8jjsh"] Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.873525 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nbmv" event={"ID":"3c5321d7-5d48-4707-8b3a-e930dcc3440b","Type":"ContainerStarted","Data":"b01a0629b9e5785048c6380036141e0fe739de2fd8de243ade507a2a090cfaa2"} Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.874012 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-8jjsh" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.875747 4911 generic.go:334] "Generic (PLEG): container finished" podID="c51258d5-70eb-4d04-99ee-d6fde808f365" containerID="685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed" exitCode=0 Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.875802 4911 scope.go:117] "RemoveContainer" containerID="685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.907624 4911 scope.go:117] "RemoveContainer" containerID="685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed" Jun 06 09:52:12 crc kubenswrapper[4911]: E0606 09:52:12.909566 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed\": container with ID starting with 685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed not found: ID does not exist" containerID="685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed" Jun 06 09:52:12 crc kubenswrapper[4911]: I0606 09:52:12.909619 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed"} err="failed to get container status \"685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed\": rpc error: code = NotFound desc = could not find container \"685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed\": container with ID starting with 685f16e5f08f1ae4780aae1bd0946f5d979f2daff6595898204656ef4a0321ed not found: ID does not exist" Jun 06 09:52:13 crc kubenswrapper[4911]: I0606 09:52:13.028585 
4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c51258d5-70eb-4d04-99ee-d6fde808f365-host\") pod \"c51258d5-70eb-4d04-99ee-d6fde808f365\" (UID: \"c51258d5-70eb-4d04-99ee-d6fde808f365\") " Jun 06 09:52:13 crc kubenswrapper[4911]: I0606 09:52:13.028689 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c51258d5-70eb-4d04-99ee-d6fde808f365-host" (OuterVolumeSpecName: "host") pod "c51258d5-70eb-4d04-99ee-d6fde808f365" (UID: "c51258d5-70eb-4d04-99ee-d6fde808f365"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:52:13 crc kubenswrapper[4911]: I0606 09:52:13.028895 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svmnb\" (UniqueName: \"kubernetes.io/projected/c51258d5-70eb-4d04-99ee-d6fde808f365-kube-api-access-svmnb\") pod \"c51258d5-70eb-4d04-99ee-d6fde808f365\" (UID: \"c51258d5-70eb-4d04-99ee-d6fde808f365\") " Jun 06 09:52:13 crc kubenswrapper[4911]: I0606 09:52:13.030436 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c51258d5-70eb-4d04-99ee-d6fde808f365-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:52:13 crc kubenswrapper[4911]: I0606 09:52:13.035830 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c51258d5-70eb-4d04-99ee-d6fde808f365-kube-api-access-svmnb" (OuterVolumeSpecName: "kube-api-access-svmnb") pod "c51258d5-70eb-4d04-99ee-d6fde808f365" (UID: "c51258d5-70eb-4d04-99ee-d6fde808f365"). InnerVolumeSpecName "kube-api-access-svmnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:52:13 crc kubenswrapper[4911]: I0606 09:52:13.132923 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svmnb\" (UniqueName: \"kubernetes.io/projected/c51258d5-70eb-4d04-99ee-d6fde808f365-kube-api-access-svmnb\") on node \"crc\" DevicePath \"\"" Jun 06 09:52:13 crc kubenswrapper[4911]: I0606 09:52:13.888800 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-8jjsh" Jun 06 09:52:13 crc kubenswrapper[4911]: I0606 09:52:13.891171 4911 generic.go:334] "Generic (PLEG): container finished" podID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" containerID="ce9fb8aa7a461b2a502e25fd6a3050025b4a285ad454b17db1ba1535a5413158" exitCode=0 Jun 06 09:52:13 crc kubenswrapper[4911]: I0606 09:52:13.891323 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nbmv" event={"ID":"3c5321d7-5d48-4707-8b3a-e930dcc3440b","Type":"ContainerDied","Data":"ce9fb8aa7a461b2a502e25fd6a3050025b4a285ad454b17db1ba1535a5413158"} Jun 06 09:52:13 crc kubenswrapper[4911]: I0606 09:52:13.961058 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c51258d5-70eb-4d04-99ee-d6fde808f365" path="/var/lib/kubelet/pods/c51258d5-70eb-4d04-99ee-d6fde808f365/volumes" Jun 06 09:52:15 crc kubenswrapper[4911]: I0606 09:52:15.912675 4911 generic.go:334] "Generic (PLEG): container finished" podID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" containerID="b0b8c2e3432a73e77d0fc2f39797355db7adfca6d88acdbcfd567ce83ac37430" exitCode=0 Jun 06 09:52:15 crc kubenswrapper[4911]: I0606 09:52:15.912728 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nbmv" event={"ID":"3c5321d7-5d48-4707-8b3a-e930dcc3440b","Type":"ContainerDied","Data":"b0b8c2e3432a73e77d0fc2f39797355db7adfca6d88acdbcfd567ce83ac37430"} Jun 06 09:52:16 crc kubenswrapper[4911]: I0606 09:52:16.924980 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nbmv" event={"ID":"3c5321d7-5d48-4707-8b3a-e930dcc3440b","Type":"ContainerStarted","Data":"ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f"} Jun 06 09:52:16 crc kubenswrapper[4911]: I0606 09:52:16.948393 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7nbmv" podStartSLOduration=3.518605506 podStartE2EDuration="5.94837718s" podCreationTimestamp="2025-06-06 09:52:11 +0000 UTC" firstStartedPulling="2025-06-06 09:52:13.893627397 +0000 UTC m=+2345.169052930" lastFinishedPulling="2025-06-06 09:52:16.323399051 +0000 UTC m=+2347.598824604" observedRunningTime="2025-06-06 09:52:16.944178851 +0000 UTC m=+2348.219604404" watchObservedRunningTime="2025-06-06 09:52:16.94837718 +0000 UTC m=+2348.223802723" Jun 06 09:52:17 crc kubenswrapper[4911]: I0606 09:52:17.948459 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:52:17 crc kubenswrapper[4911]: E0606 09:52:17.948755 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:52:22 crc kubenswrapper[4911]: I0606 09:52:22.349837 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:22 crc kubenswrapper[4911]: I0606 09:52:22.350433 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:22 crc kubenswrapper[4911]: I0606 09:52:22.402252 4911 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:23 crc kubenswrapper[4911]: I0606 09:52:23.030407 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:23 crc kubenswrapper[4911]: I0606 09:52:23.076401 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7nbmv"] Jun 06 09:52:25 crc kubenswrapper[4911]: I0606 09:52:25.001392 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7nbmv" podUID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" containerName="registry-server" containerID="cri-o://ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f" gracePeriod=2 Jun 06 09:52:25 crc kubenswrapper[4911]: I0606 09:52:25.444986 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:25 crc kubenswrapper[4911]: I0606 09:52:25.581456 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-utilities\") pod \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " Jun 06 09:52:25 crc kubenswrapper[4911]: I0606 09:52:25.581608 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-catalog-content\") pod \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " Jun 06 09:52:25 crc kubenswrapper[4911]: I0606 09:52:25.581772 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5ktz\" (UniqueName: \"kubernetes.io/projected/3c5321d7-5d48-4707-8b3a-e930dcc3440b-kube-api-access-k5ktz\") pod \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\" (UID: \"3c5321d7-5d48-4707-8b3a-e930dcc3440b\") " Jun 06 09:52:25 crc kubenswrapper[4911]: I0606 09:52:25.584080 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-utilities" (OuterVolumeSpecName: "utilities") pod "3c5321d7-5d48-4707-8b3a-e930dcc3440b" (UID: "3c5321d7-5d48-4707-8b3a-e930dcc3440b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:52:25 crc kubenswrapper[4911]: I0606 09:52:25.588478 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c5321d7-5d48-4707-8b3a-e930dcc3440b-kube-api-access-k5ktz" (OuterVolumeSpecName: "kube-api-access-k5ktz") pod "3c5321d7-5d48-4707-8b3a-e930dcc3440b" (UID: "3c5321d7-5d48-4707-8b3a-e930dcc3440b"). InnerVolumeSpecName "kube-api-access-k5ktz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:52:25 crc kubenswrapper[4911]: I0606 09:52:25.624348 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c5321d7-5d48-4707-8b3a-e930dcc3440b" (UID: "3c5321d7-5d48-4707-8b3a-e930dcc3440b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:52:25 crc kubenswrapper[4911]: I0606 09:52:25.684199 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:52:25 crc kubenswrapper[4911]: I0606 09:52:25.684249 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c5321d7-5d48-4707-8b3a-e930dcc3440b-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:52:25 crc kubenswrapper[4911]: I0606 09:52:25.684263 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5ktz\" (UniqueName: \"kubernetes.io/projected/3c5321d7-5d48-4707-8b3a-e930dcc3440b-kube-api-access-k5ktz\") on node \"crc\" DevicePath \"\"" Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.011349 4911 generic.go:334] "Generic (PLEG): container finished" podID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" containerID="ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f" exitCode=0 Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.011412 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nbmv" event={"ID":"3c5321d7-5d48-4707-8b3a-e930dcc3440b","Type":"ContainerDied","Data":"ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f"} Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.011449 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7nbmv" event={"ID":"3c5321d7-5d48-4707-8b3a-e930dcc3440b","Type":"ContainerDied","Data":"b01a0629b9e5785048c6380036141e0fe739de2fd8de243ade507a2a090cfaa2"} Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.011468 4911 scope.go:117] "RemoveContainer" containerID="ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f" Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.011619 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7nbmv" Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.049954 4911 scope.go:117] "RemoveContainer" containerID="b0b8c2e3432a73e77d0fc2f39797355db7adfca6d88acdbcfd567ce83ac37430" Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.050972 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7nbmv"] Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.061045 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7nbmv"] Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.076630 4911 scope.go:117] "RemoveContainer" containerID="ce9fb8aa7a461b2a502e25fd6a3050025b4a285ad454b17db1ba1535a5413158" Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.125004 4911 scope.go:117] "RemoveContainer" containerID="ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f" Jun 06 09:52:26 crc kubenswrapper[4911]: E0606 09:52:26.125674 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f\": container with ID starting with ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f not found: ID does not exist" containerID="ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f" Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.125735 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f"} err="failed to get container status \"ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f\": rpc error: code = NotFound desc = could not find container \"ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f\": container with ID starting with ad7345ed8d34d66d28b827f83f7f6de2fb1b73bf537ffa627f5cffae6986851f not found: ID does not exist" Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.125777 4911 scope.go:117] "RemoveContainer" containerID="b0b8c2e3432a73e77d0fc2f39797355db7adfca6d88acdbcfd567ce83ac37430" Jun 06 09:52:26 crc kubenswrapper[4911]: E0606 09:52:26.126511 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0b8c2e3432a73e77d0fc2f39797355db7adfca6d88acdbcfd567ce83ac37430\": container with ID starting with b0b8c2e3432a73e77d0fc2f39797355db7adfca6d88acdbcfd567ce83ac37430 not found: ID does not exist" containerID="b0b8c2e3432a73e77d0fc2f39797355db7adfca6d88acdbcfd567ce83ac37430" Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.126555 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0b8c2e3432a73e77d0fc2f39797355db7adfca6d88acdbcfd567ce83ac37430"} err="failed to get container status \"b0b8c2e3432a73e77d0fc2f39797355db7adfca6d88acdbcfd567ce83ac37430\": rpc error: code = NotFound desc = could not find container \"b0b8c2e3432a73e77d0fc2f39797355db7adfca6d88acdbcfd567ce83ac37430\": container with ID starting with b0b8c2e3432a73e77d0fc2f39797355db7adfca6d88acdbcfd567ce83ac37430 not found: ID does not exist" Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.126590 4911 scope.go:117] "RemoveContainer" containerID="ce9fb8aa7a461b2a502e25fd6a3050025b4a285ad454b17db1ba1535a5413158" Jun 06 09:52:26 crc kubenswrapper[4911]: E0606 09:52:26.126890 4911 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ce9fb8aa7a461b2a502e25fd6a3050025b4a285ad454b17db1ba1535a5413158\": container with ID starting with ce9fb8aa7a461b2a502e25fd6a3050025b4a285ad454b17db1ba1535a5413158 not found: ID does not exist" containerID="ce9fb8aa7a461b2a502e25fd6a3050025b4a285ad454b17db1ba1535a5413158" Jun 06 09:52:26 crc kubenswrapper[4911]: I0606 09:52:26.126923 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce9fb8aa7a461b2a502e25fd6a3050025b4a285ad454b17db1ba1535a5413158"} err="failed to get container status \"ce9fb8aa7a461b2a502e25fd6a3050025b4a285ad454b17db1ba1535a5413158\": rpc error: code = NotFound desc = could not find container \"ce9fb8aa7a461b2a502e25fd6a3050025b4a285ad454b17db1ba1535a5413158\": container with ID starting with ce9fb8aa7a461b2a502e25fd6a3050025b4a285ad454b17db1ba1535a5413158 not found: ID does not exist" Jun 06 09:52:27 crc kubenswrapper[4911]: I0606 09:52:27.959052 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" path="/var/lib/kubelet/pods/3c5321d7-5d48-4707-8b3a-e930dcc3440b/volumes" Jun 06 09:52:29 crc kubenswrapper[4911]: I0606 09:52:29.947432 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:52:29 crc kubenswrapper[4911]: E0606 09:52:29.947997 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:52:41 crc kubenswrapper[4911]: I0606 09:52:41.948000 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:52:41 crc kubenswrapper[4911]: E0606 09:52:41.948790 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:52:53 crc kubenswrapper[4911]: I0606 09:52:53.948386 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:52:53 crc kubenswrapper[4911]: E0606 09:52:53.949248 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.205759 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-gnf62"] Jun 06 09:53:02 crc kubenswrapper[4911]: E0606 09:53:02.206857 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" 
containerName="registry-server" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.206879 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" containerName="registry-server" Jun 06 09:53:02 crc kubenswrapper[4911]: E0606 09:53:02.206897 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" containerName="extract-utilities" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.206905 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" containerName="extract-utilities" Jun 06 09:53:02 crc kubenswrapper[4911]: E0606 09:53:02.206921 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c51258d5-70eb-4d04-99ee-d6fde808f365" containerName="container-00" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.206929 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c51258d5-70eb-4d04-99ee-d6fde808f365" containerName="container-00" Jun 06 09:53:02 crc kubenswrapper[4911]: E0606 09:53:02.206972 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" containerName="extract-content" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.206980 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" containerName="extract-content" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.207214 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c5321d7-5d48-4707-8b3a-e930dcc3440b" containerName="registry-server" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.207248 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c51258d5-70eb-4d04-99ee-d6fde808f365" containerName="container-00" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.208105 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-gnf62" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.261112 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmq4n\" (UniqueName: \"kubernetes.io/projected/84769959-8d18-444c-8a21-8a9cddfbb8b6-kube-api-access-jmq4n\") pod \"crc-debug-gnf62\" (UID: \"84769959-8d18-444c-8a21-8a9cddfbb8b6\") " pod="openstack/crc-debug-gnf62" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.261736 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84769959-8d18-444c-8a21-8a9cddfbb8b6-host\") pod \"crc-debug-gnf62\" (UID: \"84769959-8d18-444c-8a21-8a9cddfbb8b6\") " pod="openstack/crc-debug-gnf62" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.363519 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84769959-8d18-444c-8a21-8a9cddfbb8b6-host\") pod \"crc-debug-gnf62\" (UID: \"84769959-8d18-444c-8a21-8a9cddfbb8b6\") " pod="openstack/crc-debug-gnf62" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.363633 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmq4n\" (UniqueName: \"kubernetes.io/projected/84769959-8d18-444c-8a21-8a9cddfbb8b6-kube-api-access-jmq4n\") pod \"crc-debug-gnf62\" (UID: \"84769959-8d18-444c-8a21-8a9cddfbb8b6\") " pod="openstack/crc-debug-gnf62" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.363648 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84769959-8d18-444c-8a21-8a9cddfbb8b6-host\") pod \"crc-debug-gnf62\" (UID: \"84769959-8d18-444c-8a21-8a9cddfbb8b6\") " pod="openstack/crc-debug-gnf62" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.388857 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmq4n\" (UniqueName: \"kubernetes.io/projected/84769959-8d18-444c-8a21-8a9cddfbb8b6-kube-api-access-jmq4n\") pod \"crc-debug-gnf62\" (UID: \"84769959-8d18-444c-8a21-8a9cddfbb8b6\") " pod="openstack/crc-debug-gnf62" Jun 06 09:53:02 crc kubenswrapper[4911]: I0606 09:53:02.533615 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-gnf62" Jun 06 09:53:03 crc kubenswrapper[4911]: I0606 09:53:03.371407 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-gnf62" event={"ID":"84769959-8d18-444c-8a21-8a9cddfbb8b6","Type":"ContainerStarted","Data":"c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694"} Jun 06 09:53:03 crc kubenswrapper[4911]: I0606 09:53:03.371914 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-gnf62" event={"ID":"84769959-8d18-444c-8a21-8a9cddfbb8b6","Type":"ContainerStarted","Data":"ca062333725af266de7f482410cf4476e94a558db73ecbdc3949350df308a512"} Jun 06 09:53:03 crc kubenswrapper[4911]: I0606 09:53:03.394478 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-gnf62" podStartSLOduration=1.394449361 podStartE2EDuration="1.394449361s" podCreationTimestamp="2025-06-06 09:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:53:03.385224201 +0000 UTC m=+2394.660649754" watchObservedRunningTime="2025-06-06 09:53:03.394449361 +0000 UTC m=+2394.669874904" Jun 06 09:53:05 crc kubenswrapper[4911]: I0606 09:53:05.951040 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:53:05 crc kubenswrapper[4911]: E0606 09:53:05.951873 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.143614 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-gnf62"] Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.144278 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-gnf62" podUID="84769959-8d18-444c-8a21-8a9cddfbb8b6" containerName="container-00" containerID="cri-o://c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694" gracePeriod=2 Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.153204 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-gnf62"] Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.261478 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-gnf62" Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.396056 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84769959-8d18-444c-8a21-8a9cddfbb8b6-host\") pod \"84769959-8d18-444c-8a21-8a9cddfbb8b6\" (UID: \"84769959-8d18-444c-8a21-8a9cddfbb8b6\") " Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.396389 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmq4n\" (UniqueName: \"kubernetes.io/projected/84769959-8d18-444c-8a21-8a9cddfbb8b6-kube-api-access-jmq4n\") pod \"84769959-8d18-444c-8a21-8a9cddfbb8b6\" (UID: \"84769959-8d18-444c-8a21-8a9cddfbb8b6\") " Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.396179 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/84769959-8d18-444c-8a21-8a9cddfbb8b6-host" (OuterVolumeSpecName: "host") pod "84769959-8d18-444c-8a21-8a9cddfbb8b6" (UID: "84769959-8d18-444c-8a21-8a9cddfbb8b6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.397159 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84769959-8d18-444c-8a21-8a9cddfbb8b6-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.402191 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84769959-8d18-444c-8a21-8a9cddfbb8b6-kube-api-access-jmq4n" (OuterVolumeSpecName: "kube-api-access-jmq4n") pod "84769959-8d18-444c-8a21-8a9cddfbb8b6" (UID: "84769959-8d18-444c-8a21-8a9cddfbb8b6"). InnerVolumeSpecName "kube-api-access-jmq4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.476531 4911 generic.go:334] "Generic (PLEG): container finished" podID="84769959-8d18-444c-8a21-8a9cddfbb8b6" containerID="c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694" exitCode=0 Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.476625 4911 scope.go:117] "RemoveContainer" containerID="c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694" Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.476914 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-gnf62" Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.499497 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmq4n\" (UniqueName: \"kubernetes.io/projected/84769959-8d18-444c-8a21-8a9cddfbb8b6-kube-api-access-jmq4n\") on node \"crc\" DevicePath \"\"" Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.510494 4911 scope.go:117] "RemoveContainer" containerID="c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694" Jun 06 09:53:13 crc kubenswrapper[4911]: E0606 09:53:13.511011 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694\": container with ID starting with c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694 not found: ID does not exist" containerID="c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694" Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.511065 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694"} err="failed to get container status \"c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694\": rpc error: code = NotFound desc = could not find container \"c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694\": container with ID starting with c84def65b7246d95b8c5de0ba8b3957d492e0bdd9fba9f237358c442e9f02694 not found: ID does not exist" Jun 06 09:53:13 crc kubenswrapper[4911]: I0606 09:53:13.959979 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84769959-8d18-444c-8a21-8a9cddfbb8b6" path="/var/lib/kubelet/pods/84769959-8d18-444c-8a21-8a9cddfbb8b6/volumes" Jun 06 09:53:19 crc kubenswrapper[4911]: I0606 09:53:19.955659 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:53:19 crc kubenswrapper[4911]: E0606 09:53:19.956308 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:53:30 crc kubenswrapper[4911]: I0606 09:53:30.948428 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:53:30 crc kubenswrapper[4911]: E0606 09:53:30.949178 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:53:45 crc kubenswrapper[4911]: I0606 09:53:45.948146 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:53:45 crc kubenswrapper[4911]: E0606 09:53:45.949006 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:53:56 crc kubenswrapper[4911]: I0606 09:53:56.948615 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:53:56 crc kubenswrapper[4911]: E0606 09:53:56.949463 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:54:01 crc kubenswrapper[4911]: I0606 09:54:01.545706 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-mhcdj"] Jun 06 09:54:01 crc kubenswrapper[4911]: E0606 09:54:01.546713 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84769959-8d18-444c-8a21-8a9cddfbb8b6" containerName="container-00" Jun 06 09:54:01 crc kubenswrapper[4911]: I0606 09:54:01.546729 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="84769959-8d18-444c-8a21-8a9cddfbb8b6" containerName="container-00" Jun 06 09:54:01 crc kubenswrapper[4911]: I0606 09:54:01.546968 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="84769959-8d18-444c-8a21-8a9cddfbb8b6" containerName="container-00" Jun 06 09:54:01 crc kubenswrapper[4911]: I0606 09:54:01.547701 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mhcdj" Jun 06 09:54:01 crc kubenswrapper[4911]: I0606 09:54:01.634113 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lktw2\" (UniqueName: \"kubernetes.io/projected/dd680c65-8c64-487a-be82-4dcc61fabd69-kube-api-access-lktw2\") pod \"crc-debug-mhcdj\" (UID: \"dd680c65-8c64-487a-be82-4dcc61fabd69\") " pod="openstack/crc-debug-mhcdj" Jun 06 09:54:01 crc kubenswrapper[4911]: I0606 09:54:01.634529 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd680c65-8c64-487a-be82-4dcc61fabd69-host\") pod \"crc-debug-mhcdj\" (UID: \"dd680c65-8c64-487a-be82-4dcc61fabd69\") " pod="openstack/crc-debug-mhcdj" Jun 06 09:54:01 crc kubenswrapper[4911]: I0606 09:54:01.736639 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lktw2\" (UniqueName: \"kubernetes.io/projected/dd680c65-8c64-487a-be82-4dcc61fabd69-kube-api-access-lktw2\") pod \"crc-debug-mhcdj\" (UID: \"dd680c65-8c64-487a-be82-4dcc61fabd69\") " pod="openstack/crc-debug-mhcdj" Jun 06 09:54:01 crc kubenswrapper[4911]: I0606 09:54:01.736811 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd680c65-8c64-487a-be82-4dcc61fabd69-host\") pod \"crc-debug-mhcdj\" (UID: \"dd680c65-8c64-487a-be82-4dcc61fabd69\") " pod="openstack/crc-debug-mhcdj" Jun 06 09:54:01 crc kubenswrapper[4911]: I0606 09:54:01.737029 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd680c65-8c64-487a-be82-4dcc61fabd69-host\") pod \"crc-debug-mhcdj\" (UID: \"dd680c65-8c64-487a-be82-4dcc61fabd69\") " pod="openstack/crc-debug-mhcdj" Jun 06 09:54:01 crc kubenswrapper[4911]: I0606 09:54:01.760905 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lktw2\" (UniqueName: \"kubernetes.io/projected/dd680c65-8c64-487a-be82-4dcc61fabd69-kube-api-access-lktw2\") pod \"crc-debug-mhcdj\" (UID: \"dd680c65-8c64-487a-be82-4dcc61fabd69\") " pod="openstack/crc-debug-mhcdj" Jun 06 09:54:01 crc kubenswrapper[4911]: I0606 09:54:01.869598 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mhcdj" Jun 06 09:54:02 crc kubenswrapper[4911]: I0606 09:54:02.917564 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mhcdj" event={"ID":"dd680c65-8c64-487a-be82-4dcc61fabd69","Type":"ContainerStarted","Data":"1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88"} Jun 06 09:54:02 crc kubenswrapper[4911]: I0606 09:54:02.917894 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mhcdj" event={"ID":"dd680c65-8c64-487a-be82-4dcc61fabd69","Type":"ContainerStarted","Data":"96a1e4bfebfe88f3d2c6d56ee8a0ed65308f834931dee4d4421e6fe3040d52d0"} Jun 06 09:54:11 crc kubenswrapper[4911]: I0606 09:54:11.948669 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:54:11 crc kubenswrapper[4911]: E0606 09:54:11.949637 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:54:12 crc kubenswrapper[4911]: I0606 09:54:12.541613 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-mhcdj" podStartSLOduration=11.541583125 podStartE2EDuration="11.541583125s" podCreationTimestamp="2025-06-06 09:54:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:54:02.936227415 +0000 UTC m=+2454.211652978" watchObservedRunningTime="2025-06-06 09:54:12.541583125 +0000 UTC m=+2463.817008668" Jun 06 09:54:12 crc kubenswrapper[4911]: I0606 09:54:12.545237 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-mhcdj"] Jun 06 09:54:12 crc kubenswrapper[4911]: I0606 09:54:12.545521 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-mhcdj" podUID="dd680c65-8c64-487a-be82-4dcc61fabd69" containerName="container-00" containerID="cri-o://1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88" gracePeriod=2 Jun 06 09:54:12 crc kubenswrapper[4911]: I0606 09:54:12.559002 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-mhcdj"] Jun 06 09:54:12 crc kubenswrapper[4911]: I0606 09:54:12.748823 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-mhcdj" Jun 06 09:54:12 crc kubenswrapper[4911]: I0606 09:54:12.923260 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd680c65-8c64-487a-be82-4dcc61fabd69-host\") pod \"dd680c65-8c64-487a-be82-4dcc61fabd69\" (UID: \"dd680c65-8c64-487a-be82-4dcc61fabd69\") " Jun 06 09:54:12 crc kubenswrapper[4911]: I0606 09:54:12.923518 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dd680c65-8c64-487a-be82-4dcc61fabd69-host" (OuterVolumeSpecName: "host") pod "dd680c65-8c64-487a-be82-4dcc61fabd69" (UID: "dd680c65-8c64-487a-be82-4dcc61fabd69"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:54:12 crc kubenswrapper[4911]: I0606 09:54:12.923948 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lktw2\" (UniqueName: \"kubernetes.io/projected/dd680c65-8c64-487a-be82-4dcc61fabd69-kube-api-access-lktw2\") pod \"dd680c65-8c64-487a-be82-4dcc61fabd69\" (UID: \"dd680c65-8c64-487a-be82-4dcc61fabd69\") " Jun 06 09:54:12 crc kubenswrapper[4911]: I0606 09:54:12.924781 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd680c65-8c64-487a-be82-4dcc61fabd69-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:54:12 crc kubenswrapper[4911]: I0606 09:54:12.930285 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd680c65-8c64-487a-be82-4dcc61fabd69-kube-api-access-lktw2" (OuterVolumeSpecName: "kube-api-access-lktw2") pod "dd680c65-8c64-487a-be82-4dcc61fabd69" (UID: "dd680c65-8c64-487a-be82-4dcc61fabd69"). InnerVolumeSpecName "kube-api-access-lktw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:54:13 crc kubenswrapper[4911]: I0606 09:54:13.026871 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lktw2\" (UniqueName: \"kubernetes.io/projected/dd680c65-8c64-487a-be82-4dcc61fabd69-kube-api-access-lktw2\") on node \"crc\" DevicePath \"\"" Jun 06 09:54:13 crc kubenswrapper[4911]: I0606 09:54:13.042911 4911 generic.go:334] "Generic (PLEG): container finished" podID="dd680c65-8c64-487a-be82-4dcc61fabd69" containerID="1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88" exitCode=0 Jun 06 09:54:13 crc kubenswrapper[4911]: I0606 09:54:13.042963 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mhcdj" Jun 06 09:54:13 crc kubenswrapper[4911]: I0606 09:54:13.042994 4911 scope.go:117] "RemoveContainer" containerID="1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88" Jun 06 09:54:13 crc kubenswrapper[4911]: I0606 09:54:13.045219 4911 generic.go:334] "Generic (PLEG): container finished" podID="8af0e0c9-e20f-479b-8622-49565f84eb2b" containerID="18b96e2892888d77cf6b9f87b7de8ef204c776c122f2150e04ee15d8523094a3" exitCode=0 Jun 06 09:54:13 crc kubenswrapper[4911]: I0606 09:54:13.045257 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" event={"ID":"8af0e0c9-e20f-479b-8622-49565f84eb2b","Type":"ContainerDied","Data":"18b96e2892888d77cf6b9f87b7de8ef204c776c122f2150e04ee15d8523094a3"} Jun 06 09:54:13 crc kubenswrapper[4911]: I0606 09:54:13.067144 4911 scope.go:117] "RemoveContainer" containerID="1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88" Jun 06 09:54:13 crc kubenswrapper[4911]: E0606 09:54:13.068068 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88\": container with ID starting with 1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88 not found: ID does not exist" containerID="1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88" Jun 06 09:54:13 crc kubenswrapper[4911]: I0606 09:54:13.068126 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88"} err="failed to get container status \"1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88\": rpc error: code = NotFound desc = could not find container \"1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88\": container with ID starting with 1f841ef9285945415dfd824d84eda68a65bcb71a2ea7f369f161a912095bba88 not found: ID does not exist" Jun 06 09:54:13 crc kubenswrapper[4911]: I0606 09:54:13.960173 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd680c65-8c64-487a-be82-4dcc61fabd69" path="/var/lib/kubelet/pods/dd680c65-8c64-487a-be82-4dcc61fabd69/volumes" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.527402 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.661758 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-1\") pod \"8af0e0c9-e20f-479b-8622-49565f84eb2b\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.661805 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-combined-ca-bundle\") pod \"8af0e0c9-e20f-479b-8622-49565f84eb2b\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.661833 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-extra-config-0\") pod \"8af0e0c9-e20f-479b-8622-49565f84eb2b\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.661859 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-0\") pod \"8af0e0c9-e20f-479b-8622-49565f84eb2b\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.661953 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-inventory\") pod \"8af0e0c9-e20f-479b-8622-49565f84eb2b\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.662000 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-1\") pod \"8af0e0c9-e20f-479b-8622-49565f84eb2b\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.662022 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-ssh-key\") pod \"8af0e0c9-e20f-479b-8622-49565f84eb2b\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.662102 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-0\") pod \"8af0e0c9-e20f-479b-8622-49565f84eb2b\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.662146 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skzrd\" (UniqueName: \"kubernetes.io/projected/8af0e0c9-e20f-479b-8622-49565f84eb2b-kube-api-access-skzrd\") pod \"8af0e0c9-e20f-479b-8622-49565f84eb2b\" (UID: \"8af0e0c9-e20f-479b-8622-49565f84eb2b\") " Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.669007 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "8af0e0c9-e20f-479b-8622-49565f84eb2b" (UID: "8af0e0c9-e20f-479b-8622-49565f84eb2b"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.670281 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8af0e0c9-e20f-479b-8622-49565f84eb2b-kube-api-access-skzrd" (OuterVolumeSpecName: "kube-api-access-skzrd") pod "8af0e0c9-e20f-479b-8622-49565f84eb2b" (UID: "8af0e0c9-e20f-479b-8622-49565f84eb2b"). InnerVolumeSpecName "kube-api-access-skzrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.700207 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-inventory" (OuterVolumeSpecName: "inventory") pod "8af0e0c9-e20f-479b-8622-49565f84eb2b" (UID: "8af0e0c9-e20f-479b-8622-49565f84eb2b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.700895 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "8af0e0c9-e20f-479b-8622-49565f84eb2b" (UID: "8af0e0c9-e20f-479b-8622-49565f84eb2b"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.710860 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "8af0e0c9-e20f-479b-8622-49565f84eb2b" (UID: "8af0e0c9-e20f-479b-8622-49565f84eb2b"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.712899 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "8af0e0c9-e20f-479b-8622-49565f84eb2b" (UID: "8af0e0c9-e20f-479b-8622-49565f84eb2b"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.713966 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8af0e0c9-e20f-479b-8622-49565f84eb2b" (UID: "8af0e0c9-e20f-479b-8622-49565f84eb2b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.719639 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "8af0e0c9-e20f-479b-8622-49565f84eb2b" (UID: "8af0e0c9-e20f-479b-8622-49565f84eb2b"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.735264 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "8af0e0c9-e20f-479b-8622-49565f84eb2b" (UID: "8af0e0c9-e20f-479b-8622-49565f84eb2b"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.765190 4911 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.765249 4911 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.765263 4911 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.765275 4911 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.765288 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.765300 4911 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.765313 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.765330 4911 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8af0e0c9-e20f-479b-8622-49565f84eb2b-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:54:14 crc kubenswrapper[4911]: I0606 09:54:14.765341 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skzrd\" (UniqueName: \"kubernetes.io/projected/8af0e0c9-e20f-479b-8622-49565f84eb2b-kube-api-access-skzrd\") on node \"crc\" DevicePath \"\"" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.067165 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" event={"ID":"8af0e0c9-e20f-479b-8622-49565f84eb2b","Type":"ContainerDied","Data":"e62aa199ff5cbdc8f9ec48c2bacadde9f80e626916ac7825d6012bc25699ccd9"} Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.067206 4911 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="e62aa199ff5cbdc8f9ec48c2bacadde9f80e626916ac7825d6012bc25699ccd9" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.067237 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-w894s" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.153599 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t"] Jun 06 09:54:15 crc kubenswrapper[4911]: E0606 09:54:15.154343 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8af0e0c9-e20f-479b-8622-49565f84eb2b" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.154374 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8af0e0c9-e20f-479b-8622-49565f84eb2b" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jun 06 09:54:15 crc kubenswrapper[4911]: E0606 09:54:15.154404 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd680c65-8c64-487a-be82-4dcc61fabd69" containerName="container-00" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.154413 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd680c65-8c64-487a-be82-4dcc61fabd69" containerName="container-00" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.154623 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="8af0e0c9-e20f-479b-8622-49565f84eb2b" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.154641 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd680c65-8c64-487a-be82-4dcc61fabd69" containerName="container-00" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.155413 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.158218 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.158310 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.158343 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.158742 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xb2st" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.158788 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.169058 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t"] Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.275221 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.275309 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.275368 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-3\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-3\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.275385 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.275424 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: 
I0606 09:54:15.275575 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.275612 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-4\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.275638 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.275826 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcw5c\" (UniqueName: \"kubernetes.io/projected/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-kube-api-access-bcw5c\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.377255 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.377302 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-4\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.377326 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.377368 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcw5c\" (UniqueName: \"kubernetes.io/projected/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-kube-api-access-bcw5c\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.377411 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.377446 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.377479 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-3\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-3\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.377497 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.377535 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.381475 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.381642 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.381895 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-4\" (UniqueName: 
\"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.382244 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-3\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-3\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.382296 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.382836 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.382936 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.383562 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.395624 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcw5c\" (UniqueName: \"kubernetes.io/projected/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-kube-api-access-bcw5c\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.475233 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:54:15 crc kubenswrapper[4911]: I0606 09:54:15.995981 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t"] Jun 06 09:54:16 crc kubenswrapper[4911]: I0606 09:54:16.077736 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" event={"ID":"efc7deb8-a82d-4b0d-b3f9-200faf558eeb","Type":"ContainerStarted","Data":"dbcc287116323db28957c7f8752887d25675aba33e4e08ae89e6ec0c039b6ade"} Jun 06 09:54:17 crc kubenswrapper[4911]: I0606 09:54:17.100921 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" event={"ID":"efc7deb8-a82d-4b0d-b3f9-200faf558eeb","Type":"ContainerStarted","Data":"e16a387a7d0a2306279becc6a16dae30553adc868bf95369b0bdcd59580d076e"} Jun 06 09:54:17 crc kubenswrapper[4911]: I0606 09:54:17.133115 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" podStartSLOduration=1.621852104 podStartE2EDuration="2.133065588s" podCreationTimestamp="2025-06-06 09:54:15 +0000 UTC" firstStartedPulling="2025-06-06 09:54:16.006076012 +0000 UTC m=+2467.281501555" lastFinishedPulling="2025-06-06 09:54:16.517289496 +0000 UTC m=+2467.792715039" observedRunningTime="2025-06-06 09:54:17.125618355 +0000 UTC m=+2468.401043898" watchObservedRunningTime="2025-06-06 09:54:17.133065588 +0000 UTC m=+2468.408491131" Jun 06 09:54:24 crc kubenswrapper[4911]: I0606 09:54:24.948775 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:54:24 crc kubenswrapper[4911]: E0606 09:54:24.949822 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:54:39 crc kubenswrapper[4911]: I0606 09:54:39.956438 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:54:39 crc kubenswrapper[4911]: E0606 09:54:39.957511 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:54:53 crc kubenswrapper[4911]: I0606 09:54:53.947986 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:54:53 crc kubenswrapper[4911]: E0606 09:54:53.948862 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 09:55:01 crc kubenswrapper[4911]: I0606 09:55:01.892770 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-xgvv8"] Jun 06 09:55:01 crc kubenswrapper[4911]: I0606 09:55:01.895161 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-xgvv8" Jun 06 09:55:02 crc kubenswrapper[4911]: I0606 09:55:02.003231 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/45664d28-c1ad-4aed-9c68-801ec2da7c1b-host\") pod \"crc-debug-xgvv8\" (UID: \"45664d28-c1ad-4aed-9c68-801ec2da7c1b\") " pod="openstack/crc-debug-xgvv8" Jun 06 09:55:02 crc kubenswrapper[4911]: I0606 09:55:02.003522 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqb44\" (UniqueName: \"kubernetes.io/projected/45664d28-c1ad-4aed-9c68-801ec2da7c1b-kube-api-access-mqb44\") pod \"crc-debug-xgvv8\" (UID: \"45664d28-c1ad-4aed-9c68-801ec2da7c1b\") " pod="openstack/crc-debug-xgvv8" Jun 06 09:55:02 crc kubenswrapper[4911]: I0606 09:55:02.105472 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/45664d28-c1ad-4aed-9c68-801ec2da7c1b-host\") pod \"crc-debug-xgvv8\" (UID: \"45664d28-c1ad-4aed-9c68-801ec2da7c1b\") " pod="openstack/crc-debug-xgvv8" Jun 06 09:55:02 crc kubenswrapper[4911]: I0606 09:55:02.105534 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/45664d28-c1ad-4aed-9c68-801ec2da7c1b-host\") pod \"crc-debug-xgvv8\" (UID: \"45664d28-c1ad-4aed-9c68-801ec2da7c1b\") " pod="openstack/crc-debug-xgvv8" Jun 06 09:55:02 crc kubenswrapper[4911]: I0606 09:55:02.105765 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqb44\" (UniqueName: \"kubernetes.io/projected/45664d28-c1ad-4aed-9c68-801ec2da7c1b-kube-api-access-mqb44\") pod \"crc-debug-xgvv8\" (UID: \"45664d28-c1ad-4aed-9c68-801ec2da7c1b\") " pod="openstack/crc-debug-xgvv8" Jun 06 09:55:02 crc kubenswrapper[4911]: I0606 09:55:02.127207 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqb44\" (UniqueName: \"kubernetes.io/projected/45664d28-c1ad-4aed-9c68-801ec2da7c1b-kube-api-access-mqb44\") pod \"crc-debug-xgvv8\" (UID: \"45664d28-c1ad-4aed-9c68-801ec2da7c1b\") " pod="openstack/crc-debug-xgvv8" Jun 06 09:55:02 crc kubenswrapper[4911]: I0606 09:55:02.216878 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-xgvv8" Jun 06 09:55:02 crc kubenswrapper[4911]: I0606 09:55:02.556880 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-xgvv8" event={"ID":"45664d28-c1ad-4aed-9c68-801ec2da7c1b","Type":"ContainerStarted","Data":"f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77"} Jun 06 09:55:02 crc kubenswrapper[4911]: I0606 09:55:02.557510 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-xgvv8" event={"ID":"45664d28-c1ad-4aed-9c68-801ec2da7c1b","Type":"ContainerStarted","Data":"3067c1a6df9aaba7f22cdcbd211ddb29dfc2c47a22bedc54d619dc73618ff3a0"} Jun 06 09:55:02 crc kubenswrapper[4911]: I0606 09:55:02.578731 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-xgvv8" podStartSLOduration=1.578707467 podStartE2EDuration="1.578707467s" podCreationTimestamp="2025-06-06 09:55:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:55:02.575392771 +0000 UTC m=+2513.850818314" watchObservedRunningTime="2025-06-06 09:55:02.578707467 +0000 UTC m=+2513.854133010" Jun 06 09:55:05 crc kubenswrapper[4911]: I0606 09:55:05.951240 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:55:06 crc kubenswrapper[4911]: I0606 09:55:06.594510 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"a26b21cd2dacce99add5f3271857622702e798058ab60957fc172d6f9b03523a"} Jun 06 09:55:12 crc kubenswrapper[4911]: I0606 09:55:12.916438 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-xgvv8"] Jun 06 09:55:12 crc kubenswrapper[4911]: I0606 09:55:12.917997 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-xgvv8" podUID="45664d28-c1ad-4aed-9c68-801ec2da7c1b" containerName="container-00" containerID="cri-o://f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77" gracePeriod=2 Jun 06 09:55:12 crc kubenswrapper[4911]: I0606 09:55:12.925655 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-xgvv8"] Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.031125 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-xgvv8" Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.104648 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqb44\" (UniqueName: \"kubernetes.io/projected/45664d28-c1ad-4aed-9c68-801ec2da7c1b-kube-api-access-mqb44\") pod \"45664d28-c1ad-4aed-9c68-801ec2da7c1b\" (UID: \"45664d28-c1ad-4aed-9c68-801ec2da7c1b\") " Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.104896 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/45664d28-c1ad-4aed-9c68-801ec2da7c1b-host\") pod \"45664d28-c1ad-4aed-9c68-801ec2da7c1b\" (UID: \"45664d28-c1ad-4aed-9c68-801ec2da7c1b\") " Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.106148 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45664d28-c1ad-4aed-9c68-801ec2da7c1b-host" (OuterVolumeSpecName: "host") pod "45664d28-c1ad-4aed-9c68-801ec2da7c1b" (UID: "45664d28-c1ad-4aed-9c68-801ec2da7c1b"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.114813 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45664d28-c1ad-4aed-9c68-801ec2da7c1b-kube-api-access-mqb44" (OuterVolumeSpecName: "kube-api-access-mqb44") pod "45664d28-c1ad-4aed-9c68-801ec2da7c1b" (UID: "45664d28-c1ad-4aed-9c68-801ec2da7c1b"). InnerVolumeSpecName "kube-api-access-mqb44". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.207183 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/45664d28-c1ad-4aed-9c68-801ec2da7c1b-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.207218 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqb44\" (UniqueName: \"kubernetes.io/projected/45664d28-c1ad-4aed-9c68-801ec2da7c1b-kube-api-access-mqb44\") on node \"crc\" DevicePath \"\"" Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.669040 4911 generic.go:334] "Generic (PLEG): container finished" podID="45664d28-c1ad-4aed-9c68-801ec2da7c1b" containerID="f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77" exitCode=0 Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.669104 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-xgvv8" Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.669166 4911 scope.go:117] "RemoveContainer" containerID="f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77" Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.698744 4911 scope.go:117] "RemoveContainer" containerID="f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77" Jun 06 09:55:13 crc kubenswrapper[4911]: E0606 09:55:13.699183 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77\": container with ID starting with f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77 not found: ID does not exist" containerID="f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77" Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.699717 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77"} err="failed to get container status \"f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77\": rpc error: code = NotFound desc = could not find container \"f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77\": container with ID starting with f7ffc2a78d1dd1f680a2f862a98c49036e69e31b5b9c9c5b70e5a098f9a00f77 not found: ID does not exist" Jun 06 09:55:13 crc kubenswrapper[4911]: I0606 09:55:13.960135 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45664d28-c1ad-4aed-9c68-801ec2da7c1b" path="/var/lib/kubelet/pods/45664d28-c1ad-4aed-9c68-801ec2da7c1b/volumes" Jun 06 09:56:02 crc kubenswrapper[4911]: I0606 09:56:02.277010 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-9tffp"] Jun 06 09:56:02 crc kubenswrapper[4911]: E0606 09:56:02.277950 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45664d28-c1ad-4aed-9c68-801ec2da7c1b" containerName="container-00" Jun 06 09:56:02 crc kubenswrapper[4911]: I0606 09:56:02.277969 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="45664d28-c1ad-4aed-9c68-801ec2da7c1b" containerName="container-00" Jun 06 09:56:02 crc kubenswrapper[4911]: I0606 09:56:02.278278 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="45664d28-c1ad-4aed-9c68-801ec2da7c1b" containerName="container-00" Jun 06 09:56:02 crc kubenswrapper[4911]: I0606 09:56:02.279002 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-9tffp" Jun 06 09:56:02 crc kubenswrapper[4911]: I0606 09:56:02.366469 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fce5923d-19cd-4fc6-85c1-3da27a3332e2-host\") pod \"crc-debug-9tffp\" (UID: \"fce5923d-19cd-4fc6-85c1-3da27a3332e2\") " pod="openstack/crc-debug-9tffp" Jun 06 09:56:02 crc kubenswrapper[4911]: I0606 09:56:02.366555 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf4s8\" (UniqueName: \"kubernetes.io/projected/fce5923d-19cd-4fc6-85c1-3da27a3332e2-kube-api-access-tf4s8\") pod \"crc-debug-9tffp\" (UID: \"fce5923d-19cd-4fc6-85c1-3da27a3332e2\") " pod="openstack/crc-debug-9tffp" Jun 06 09:56:02 crc kubenswrapper[4911]: I0606 09:56:02.469421 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fce5923d-19cd-4fc6-85c1-3da27a3332e2-host\") pod \"crc-debug-9tffp\" (UID: \"fce5923d-19cd-4fc6-85c1-3da27a3332e2\") " pod="openstack/crc-debug-9tffp" Jun 06 09:56:02 crc kubenswrapper[4911]: I0606 09:56:02.469529 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf4s8\" (UniqueName: \"kubernetes.io/projected/fce5923d-19cd-4fc6-85c1-3da27a3332e2-kube-api-access-tf4s8\") pod \"crc-debug-9tffp\" (UID: \"fce5923d-19cd-4fc6-85c1-3da27a3332e2\") " pod="openstack/crc-debug-9tffp" Jun 06 09:56:02 crc kubenswrapper[4911]: I0606 09:56:02.469625 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fce5923d-19cd-4fc6-85c1-3da27a3332e2-host\") pod \"crc-debug-9tffp\" (UID: \"fce5923d-19cd-4fc6-85c1-3da27a3332e2\") " pod="openstack/crc-debug-9tffp" Jun 06 09:56:02 crc kubenswrapper[4911]: I0606 09:56:02.491746 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf4s8\" (UniqueName: \"kubernetes.io/projected/fce5923d-19cd-4fc6-85c1-3da27a3332e2-kube-api-access-tf4s8\") pod \"crc-debug-9tffp\" (UID: \"fce5923d-19cd-4fc6-85c1-3da27a3332e2\") " pod="openstack/crc-debug-9tffp" Jun 06 09:56:02 crc kubenswrapper[4911]: I0606 09:56:02.605898 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-9tffp" Jun 06 09:56:03 crc kubenswrapper[4911]: I0606 09:56:03.179786 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-9tffp" event={"ID":"fce5923d-19cd-4fc6-85c1-3da27a3332e2","Type":"ContainerStarted","Data":"d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e"} Jun 06 09:56:03 crc kubenswrapper[4911]: I0606 09:56:03.180178 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-9tffp" event={"ID":"fce5923d-19cd-4fc6-85c1-3da27a3332e2","Type":"ContainerStarted","Data":"de41c8cd308a2505610f7ce40db3676cae6ac28b21bf05d1c748e96490de963e"} Jun 06 09:56:03 crc kubenswrapper[4911]: I0606 09:56:03.200319 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-9tffp" podStartSLOduration=1.200295822 podStartE2EDuration="1.200295822s" podCreationTimestamp="2025-06-06 09:56:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:56:03.191952606 +0000 UTC m=+2574.467378159" watchObservedRunningTime="2025-06-06 09:56:03.200295822 +0000 UTC m=+2574.475721365" Jun 06 09:56:13 crc kubenswrapper[4911]: I0606 09:56:13.338953 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-9tffp"] Jun 06 09:56:13 crc kubenswrapper[4911]: I0606 09:56:13.339900 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-9tffp" podUID="fce5923d-19cd-4fc6-85c1-3da27a3332e2" containerName="container-00" containerID="cri-o://d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e" gracePeriod=2 Jun 06 09:56:13 crc kubenswrapper[4911]: I0606 09:56:13.348600 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-9tffp"] Jun 06 09:56:13 crc kubenswrapper[4911]: I0606 09:56:13.444305 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-9tffp" Jun 06 09:56:13 crc kubenswrapper[4911]: I0606 09:56:13.514840 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fce5923d-19cd-4fc6-85c1-3da27a3332e2-host\") pod \"fce5923d-19cd-4fc6-85c1-3da27a3332e2\" (UID: \"fce5923d-19cd-4fc6-85c1-3da27a3332e2\") " Jun 06 09:56:13 crc kubenswrapper[4911]: I0606 09:56:13.514911 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fce5923d-19cd-4fc6-85c1-3da27a3332e2-host" (OuterVolumeSpecName: "host") pod "fce5923d-19cd-4fc6-85c1-3da27a3332e2" (UID: "fce5923d-19cd-4fc6-85c1-3da27a3332e2"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:56:13 crc kubenswrapper[4911]: I0606 09:56:13.515043 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tf4s8\" (UniqueName: \"kubernetes.io/projected/fce5923d-19cd-4fc6-85c1-3da27a3332e2-kube-api-access-tf4s8\") pod \"fce5923d-19cd-4fc6-85c1-3da27a3332e2\" (UID: \"fce5923d-19cd-4fc6-85c1-3da27a3332e2\") " Jun 06 09:56:13 crc kubenswrapper[4911]: I0606 09:56:13.515801 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fce5923d-19cd-4fc6-85c1-3da27a3332e2-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:13 crc kubenswrapper[4911]: I0606 09:56:13.520522 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fce5923d-19cd-4fc6-85c1-3da27a3332e2-kube-api-access-tf4s8" (OuterVolumeSpecName: "kube-api-access-tf4s8") pod "fce5923d-19cd-4fc6-85c1-3da27a3332e2" (UID: "fce5923d-19cd-4fc6-85c1-3da27a3332e2"). InnerVolumeSpecName "kube-api-access-tf4s8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:56:13 crc kubenswrapper[4911]: I0606 09:56:13.618852 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tf4s8\" (UniqueName: \"kubernetes.io/projected/fce5923d-19cd-4fc6-85c1-3da27a3332e2-kube-api-access-tf4s8\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:13 crc kubenswrapper[4911]: I0606 09:56:13.962004 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fce5923d-19cd-4fc6-85c1-3da27a3332e2" path="/var/lib/kubelet/pods/fce5923d-19cd-4fc6-85c1-3da27a3332e2/volumes" Jun 06 09:56:14 crc kubenswrapper[4911]: I0606 09:56:14.297323 4911 generic.go:334] "Generic (PLEG): container finished" podID="fce5923d-19cd-4fc6-85c1-3da27a3332e2" containerID="d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e" exitCode=0 Jun 06 09:56:14 crc kubenswrapper[4911]: I0606 09:56:14.297402 4911 scope.go:117] "RemoveContainer" containerID="d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e" Jun 06 09:56:14 crc kubenswrapper[4911]: I0606 09:56:14.297405 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-9tffp" Jun 06 09:56:14 crc kubenswrapper[4911]: I0606 09:56:14.319267 4911 scope.go:117] "RemoveContainer" containerID="d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e" Jun 06 09:56:14 crc kubenswrapper[4911]: E0606 09:56:14.319770 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e\": container with ID starting with d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e not found: ID does not exist" containerID="d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e" Jun 06 09:56:14 crc kubenswrapper[4911]: I0606 09:56:14.319832 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e"} err="failed to get container status \"d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e\": rpc error: code = NotFound desc = could not find container \"d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e\": container with ID starting with d47ebe7a7f3d26cb7c5b0709c3ace61fb2fba065b1a4cccbf6eb7876d039a59e not found: ID does not exist" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.558143 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-f9kbg"] Jun 06 09:56:33 crc kubenswrapper[4911]: E0606 09:56:33.559183 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fce5923d-19cd-4fc6-85c1-3da27a3332e2" containerName="container-00" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.559197 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fce5923d-19cd-4fc6-85c1-3da27a3332e2" containerName="container-00" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.559399 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="fce5923d-19cd-4fc6-85c1-3da27a3332e2" containerName="container-00" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.561045 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.573348 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f9kbg"] Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.621476 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-catalog-content\") pod \"redhat-marketplace-f9kbg\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.621756 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm27h\" (UniqueName: \"kubernetes.io/projected/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-kube-api-access-tm27h\") pod \"redhat-marketplace-f9kbg\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.622009 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-utilities\") pod \"redhat-marketplace-f9kbg\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.723995 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-catalog-content\") pod \"redhat-marketplace-f9kbg\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.724445 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm27h\" (UniqueName: \"kubernetes.io/projected/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-kube-api-access-tm27h\") pod \"redhat-marketplace-f9kbg\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.724540 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-utilities\") pod \"redhat-marketplace-f9kbg\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.724585 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-catalog-content\") pod \"redhat-marketplace-f9kbg\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.725193 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-utilities\") pod \"redhat-marketplace-f9kbg\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.750211 4911 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-tm27h\" (UniqueName: \"kubernetes.io/projected/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-kube-api-access-tm27h\") pod \"redhat-marketplace-f9kbg\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.765294 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rd8cd"] Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.774141 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.777344 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rd8cd"] Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.826721 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-utilities\") pod \"redhat-operators-rd8cd\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.827031 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-catalog-content\") pod \"redhat-operators-rd8cd\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.827300 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt4z5\" (UniqueName: \"kubernetes.io/projected/1a24c199-7a55-484c-9b82-88cb46f910f6-kube-api-access-qt4z5\") pod \"redhat-operators-rd8cd\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.891385 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.929198 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-catalog-content\") pod \"redhat-operators-rd8cd\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.929292 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt4z5\" (UniqueName: \"kubernetes.io/projected/1a24c199-7a55-484c-9b82-88cb46f910f6-kube-api-access-qt4z5\") pod \"redhat-operators-rd8cd\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.929384 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-utilities\") pod \"redhat-operators-rd8cd\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.929925 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-utilities\") pod \"redhat-operators-rd8cd\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.930249 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-catalog-content\") pod \"redhat-operators-rd8cd\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:33 crc kubenswrapper[4911]: I0606 09:56:33.950906 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt4z5\" (UniqueName: \"kubernetes.io/projected/1a24c199-7a55-484c-9b82-88cb46f910f6-kube-api-access-qt4z5\") pod \"redhat-operators-rd8cd\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:34 crc kubenswrapper[4911]: I0606 09:56:34.112266 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:34 crc kubenswrapper[4911]: I0606 09:56:34.358614 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f9kbg"] Jun 06 09:56:34 crc kubenswrapper[4911]: I0606 09:56:34.481484 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f9kbg" event={"ID":"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b","Type":"ContainerStarted","Data":"86bef4af0e4763d9201f6f9684ef4f41129683577f8f276d4a83b387180f2347"} Jun 06 09:56:34 crc kubenswrapper[4911]: I0606 09:56:34.610525 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rd8cd"] Jun 06 09:56:34 crc kubenswrapper[4911]: W0606 09:56:34.648753 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a24c199_7a55_484c_9b82_88cb46f910f6.slice/crio-1e89785bb28af2662de2bd75be1a1b5116a9fa8c5d7559cc26cea1bc0068abdc WatchSource:0}: Error finding container 1e89785bb28af2662de2bd75be1a1b5116a9fa8c5d7559cc26cea1bc0068abdc: Status 404 returned error can't find the container with id 1e89785bb28af2662de2bd75be1a1b5116a9fa8c5d7559cc26cea1bc0068abdc Jun 06 09:56:35 crc kubenswrapper[4911]: I0606 09:56:35.495533 4911 generic.go:334] "Generic (PLEG): container finished" podID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" containerID="2df43ff8753b5b894b648da9a7999a36ca42372fde74a34e009e677142de4b9f" exitCode=0 Jun 06 09:56:35 crc kubenswrapper[4911]: I0606 09:56:35.496751 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f9kbg" event={"ID":"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b","Type":"ContainerDied","Data":"2df43ff8753b5b894b648da9a7999a36ca42372fde74a34e009e677142de4b9f"} Jun 06 09:56:35 crc kubenswrapper[4911]: I0606 09:56:35.497969 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 09:56:35 crc kubenswrapper[4911]: I0606 09:56:35.500110 4911 generic.go:334] "Generic (PLEG): container finished" podID="1a24c199-7a55-484c-9b82-88cb46f910f6" containerID="8ecbec01d534cb176f7a970d52724adae675b6e94258bf5d4c18dd9c72c5215b" exitCode=0 Jun 06 09:56:35 crc kubenswrapper[4911]: I0606 09:56:35.500145 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rd8cd" event={"ID":"1a24c199-7a55-484c-9b82-88cb46f910f6","Type":"ContainerDied","Data":"8ecbec01d534cb176f7a970d52724adae675b6e94258bf5d4c18dd9c72c5215b"} Jun 06 09:56:35 crc kubenswrapper[4911]: I0606 09:56:35.500171 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rd8cd" event={"ID":"1a24c199-7a55-484c-9b82-88cb46f910f6","Type":"ContainerStarted","Data":"1e89785bb28af2662de2bd75be1a1b5116a9fa8c5d7559cc26cea1bc0068abdc"} Jun 06 09:56:36 crc kubenswrapper[4911]: I0606 09:56:36.512563 4911 generic.go:334] "Generic (PLEG): container finished" podID="efc7deb8-a82d-4b0d-b3f9-200faf558eeb" containerID="e16a387a7d0a2306279becc6a16dae30553adc868bf95369b0bdcd59580d076e" exitCode=0 Jun 06 09:56:36 crc kubenswrapper[4911]: I0606 09:56:36.512677 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" event={"ID":"efc7deb8-a82d-4b0d-b3f9-200faf558eeb","Type":"ContainerDied","Data":"e16a387a7d0a2306279becc6a16dae30553adc868bf95369b0bdcd59580d076e"} Jun 06 09:56:36 crc kubenswrapper[4911]: I0606 
09:56:36.518716 4911 generic.go:334] "Generic (PLEG): container finished" podID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" containerID="078dcaec53b076208470e4aa0aeed5c90b43d5d9cd074682b815197344ddc5d3" exitCode=0 Jun 06 09:56:36 crc kubenswrapper[4911]: I0606 09:56:36.518775 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f9kbg" event={"ID":"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b","Type":"ContainerDied","Data":"078dcaec53b076208470e4aa0aeed5c90b43d5d9cd074682b815197344ddc5d3"} Jun 06 09:56:37 crc kubenswrapper[4911]: I0606 09:56:37.529672 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f9kbg" event={"ID":"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b","Type":"ContainerStarted","Data":"d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b"} Jun 06 09:56:37 crc kubenswrapper[4911]: I0606 09:56:37.532462 4911 generic.go:334] "Generic (PLEG): container finished" podID="1a24c199-7a55-484c-9b82-88cb46f910f6" containerID="205910380b4e2640d7836341ee3a71b693bb44789c948b4195a09506c1e7f0af" exitCode=0 Jun 06 09:56:37 crc kubenswrapper[4911]: I0606 09:56:37.533289 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rd8cd" event={"ID":"1a24c199-7a55-484c-9b82-88cb46f910f6","Type":"ContainerDied","Data":"205910380b4e2640d7836341ee3a71b693bb44789c948b4195a09506c1e7f0af"} Jun 06 09:56:37 crc kubenswrapper[4911]: I0606 09:56:37.559155 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-f9kbg" podStartSLOduration=2.862197633 podStartE2EDuration="4.559135751s" podCreationTimestamp="2025-06-06 09:56:33 +0000 UTC" firstStartedPulling="2025-06-06 09:56:35.497736915 +0000 UTC m=+2606.773162458" lastFinishedPulling="2025-06-06 09:56:37.194675033 +0000 UTC m=+2608.470100576" observedRunningTime="2025-06-06 09:56:37.552862388 +0000 UTC m=+2608.828287961" watchObservedRunningTime="2025-06-06 09:56:37.559135751 +0000 UTC m=+2608.834561294" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.029629 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.126284 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-1\") pod \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.126422 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-2\") pod \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.126453 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-3\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-3\") pod \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.126509 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-telemetry-combined-ca-bundle\") pod \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.126553 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ssh-key\") pod \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.126594 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcw5c\" (UniqueName: \"kubernetes.io/projected/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-kube-api-access-bcw5c\") pod \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.126647 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-inventory\") pod \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.126671 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-4\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-4\") pod \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.126709 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-0\") pod \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\" (UID: \"efc7deb8-a82d-4b0d-b3f9-200faf558eeb\") " Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.132588 4911 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "efc7deb8-a82d-4b0d-b3f9-200faf558eeb" (UID: "efc7deb8-a82d-4b0d-b3f9-200faf558eeb"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.132976 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-kube-api-access-bcw5c" (OuterVolumeSpecName: "kube-api-access-bcw5c") pod "efc7deb8-a82d-4b0d-b3f9-200faf558eeb" (UID: "efc7deb8-a82d-4b0d-b3f9-200faf558eeb"). InnerVolumeSpecName "kube-api-access-bcw5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.154989 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "efc7deb8-a82d-4b0d-b3f9-200faf558eeb" (UID: "efc7deb8-a82d-4b0d-b3f9-200faf558eeb"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.155713 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-inventory" (OuterVolumeSpecName: "inventory") pod "efc7deb8-a82d-4b0d-b3f9-200faf558eeb" (UID: "efc7deb8-a82d-4b0d-b3f9-200faf558eeb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.156970 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-3" (OuterVolumeSpecName: "ceilometer-compute-config-data-3") pod "efc7deb8-a82d-4b0d-b3f9-200faf558eeb" (UID: "efc7deb8-a82d-4b0d-b3f9-200faf558eeb"). InnerVolumeSpecName "ceilometer-compute-config-data-3". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.161201 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "efc7deb8-a82d-4b0d-b3f9-200faf558eeb" (UID: "efc7deb8-a82d-4b0d-b3f9-200faf558eeb"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.163494 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "efc7deb8-a82d-4b0d-b3f9-200faf558eeb" (UID: "efc7deb8-a82d-4b0d-b3f9-200faf558eeb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.164251 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-4" (OuterVolumeSpecName: "ceilometer-compute-config-data-4") pod "efc7deb8-a82d-4b0d-b3f9-200faf558eeb" (UID: "efc7deb8-a82d-4b0d-b3f9-200faf558eeb"). 
InnerVolumeSpecName "ceilometer-compute-config-data-4". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.165299 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "efc7deb8-a82d-4b0d-b3f9-200faf558eeb" (UID: "efc7deb8-a82d-4b0d-b3f9-200faf558eeb"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.230160 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.230197 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.230208 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-3\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-3\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.230219 4911 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.230228 4911 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ssh-key\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.230239 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcw5c\" (UniqueName: \"kubernetes.io/projected/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-kube-api-access-bcw5c\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.230248 4911 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-inventory\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.230256 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-4\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-4\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.230265 4911 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/efc7deb8-a82d-4b0d-b3f9-200faf558eeb-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.547328 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rd8cd" event={"ID":"1a24c199-7a55-484c-9b82-88cb46f910f6","Type":"ContainerStarted","Data":"6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae"} Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 
09:56:38.549737 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.549788 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t" event={"ID":"efc7deb8-a82d-4b0d-b3f9-200faf558eeb","Type":"ContainerDied","Data":"dbcc287116323db28957c7f8752887d25675aba33e4e08ae89e6ec0c039b6ade"} Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.549818 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbcc287116323db28957c7f8752887d25675aba33e4e08ae89e6ec0c039b6ade" Jun 06 09:56:38 crc kubenswrapper[4911]: I0606 09:56:38.570896 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rd8cd" podStartSLOduration=2.899344497 podStartE2EDuration="5.570879486s" podCreationTimestamp="2025-06-06 09:56:33 +0000 UTC" firstStartedPulling="2025-06-06 09:56:35.502309574 +0000 UTC m=+2606.777735137" lastFinishedPulling="2025-06-06 09:56:38.173844583 +0000 UTC m=+2609.449270126" observedRunningTime="2025-06-06 09:56:38.56714529 +0000 UTC m=+2609.842570833" watchObservedRunningTime="2025-06-06 09:56:38.570879486 +0000 UTC m=+2609.846305029" Jun 06 09:56:43 crc kubenswrapper[4911]: I0606 09:56:43.891761 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:43 crc kubenswrapper[4911]: I0606 09:56:43.892470 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:43 crc kubenswrapper[4911]: I0606 09:56:43.941194 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:44 crc kubenswrapper[4911]: I0606 09:56:44.112549 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:44 crc kubenswrapper[4911]: I0606 09:56:44.112905 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:44 crc kubenswrapper[4911]: I0606 09:56:44.163282 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:44 crc kubenswrapper[4911]: I0606 09:56:44.658310 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:44 crc kubenswrapper[4911]: I0606 09:56:44.667766 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:45 crc kubenswrapper[4911]: I0606 09:56:45.547475 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rd8cd"] Jun 06 09:56:46 crc kubenswrapper[4911]: I0606 09:56:46.620077 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rd8cd" podUID="1a24c199-7a55-484c-9b82-88cb46f910f6" containerName="registry-server" containerID="cri-o://6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae" gracePeriod=2 Jun 06 09:56:46 crc kubenswrapper[4911]: I0606 09:56:46.952667 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-f9kbg"] Jun 06 09:56:46 crc kubenswrapper[4911]: I0606 09:56:46.953414 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-f9kbg" podUID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" containerName="registry-server" containerID="cri-o://d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b" gracePeriod=2 Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.172148 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.231680 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-utilities\") pod \"1a24c199-7a55-484c-9b82-88cb46f910f6\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.231756 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-catalog-content\") pod \"1a24c199-7a55-484c-9b82-88cb46f910f6\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.231807 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt4z5\" (UniqueName: \"kubernetes.io/projected/1a24c199-7a55-484c-9b82-88cb46f910f6-kube-api-access-qt4z5\") pod \"1a24c199-7a55-484c-9b82-88cb46f910f6\" (UID: \"1a24c199-7a55-484c-9b82-88cb46f910f6\") " Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.237487 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-utilities" (OuterVolumeSpecName: "utilities") pod "1a24c199-7a55-484c-9b82-88cb46f910f6" (UID: "1a24c199-7a55-484c-9b82-88cb46f910f6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.239420 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a24c199-7a55-484c-9b82-88cb46f910f6-kube-api-access-qt4z5" (OuterVolumeSpecName: "kube-api-access-qt4z5") pod "1a24c199-7a55-484c-9b82-88cb46f910f6" (UID: "1a24c199-7a55-484c-9b82-88cb46f910f6"). InnerVolumeSpecName "kube-api-access-qt4z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.342585 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.342641 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt4z5\" (UniqueName: \"kubernetes.io/projected/1a24c199-7a55-484c-9b82-88cb46f910f6-kube-api-access-qt4z5\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.402216 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.443649 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tm27h\" (UniqueName: \"kubernetes.io/projected/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-kube-api-access-tm27h\") pod \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.443703 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-utilities\") pod \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.443781 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-catalog-content\") pod \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\" (UID: \"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b\") " Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.444596 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-utilities" (OuterVolumeSpecName: "utilities") pod "5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" (UID: "5aa5de25-eb65-4fec-8ef8-5d0e1621c46b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.450432 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-kube-api-access-tm27h" (OuterVolumeSpecName: "kube-api-access-tm27h") pod "5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" (UID: "5aa5de25-eb65-4fec-8ef8-5d0e1621c46b"). InnerVolumeSpecName "kube-api-access-tm27h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.455583 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" (UID: "5aa5de25-eb65-4fec-8ef8-5d0e1621c46b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.545909 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tm27h\" (UniqueName: \"kubernetes.io/projected/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-kube-api-access-tm27h\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.545962 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.545977 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.618593 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1a24c199-7a55-484c-9b82-88cb46f910f6" (UID: "1a24c199-7a55-484c-9b82-88cb46f910f6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.631632 4911 generic.go:334] "Generic (PLEG): container finished" podID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" containerID="d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b" exitCode=0 Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.631710 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f9kbg" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.631723 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f9kbg" event={"ID":"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b","Type":"ContainerDied","Data":"d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b"} Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.631752 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f9kbg" event={"ID":"5aa5de25-eb65-4fec-8ef8-5d0e1621c46b","Type":"ContainerDied","Data":"86bef4af0e4763d9201f6f9684ef4f41129683577f8f276d4a83b387180f2347"} Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.631769 4911 scope.go:117] "RemoveContainer" containerID="d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.635548 4911 generic.go:334] "Generic (PLEG): container finished" podID="1a24c199-7a55-484c-9b82-88cb46f910f6" containerID="6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae" exitCode=0 Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.635588 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rd8cd" event={"ID":"1a24c199-7a55-484c-9b82-88cb46f910f6","Type":"ContainerDied","Data":"6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae"} Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.635600 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rd8cd" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.635616 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rd8cd" event={"ID":"1a24c199-7a55-484c-9b82-88cb46f910f6","Type":"ContainerDied","Data":"1e89785bb28af2662de2bd75be1a1b5116a9fa8c5d7559cc26cea1bc0068abdc"} Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.647961 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a24c199-7a55-484c-9b82-88cb46f910f6-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.655460 4911 scope.go:117] "RemoveContainer" containerID="078dcaec53b076208470e4aa0aeed5c90b43d5d9cd074682b815197344ddc5d3" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.671240 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rd8cd"] Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.685103 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rd8cd"] Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.696133 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f9kbg"] Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.697187 4911 scope.go:117] "RemoveContainer" containerID="2df43ff8753b5b894b648da9a7999a36ca42372fde74a34e009e677142de4b9f" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.705358 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-f9kbg"] Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.721980 4911 scope.go:117] "RemoveContainer" containerID="d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b" Jun 06 09:56:47 crc kubenswrapper[4911]: E0606 09:56:47.722496 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b\": container with ID starting with d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b not found: ID does not exist" containerID="d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.722555 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b"} err="failed to get container status \"d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b\": rpc error: code = NotFound desc = could not find container \"d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b\": container with ID starting with d347f42ad7e7331a3f1a674eaf899fab6f5f3d25f556be7b1a7d003097c2333b not found: ID does not exist" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.722578 4911 scope.go:117] "RemoveContainer" containerID="078dcaec53b076208470e4aa0aeed5c90b43d5d9cd074682b815197344ddc5d3" Jun 06 09:56:47 crc kubenswrapper[4911]: E0606 09:56:47.722989 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"078dcaec53b076208470e4aa0aeed5c90b43d5d9cd074682b815197344ddc5d3\": container with ID starting with 078dcaec53b076208470e4aa0aeed5c90b43d5d9cd074682b815197344ddc5d3 not found: ID does not exist" 
containerID="078dcaec53b076208470e4aa0aeed5c90b43d5d9cd074682b815197344ddc5d3" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.723017 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"078dcaec53b076208470e4aa0aeed5c90b43d5d9cd074682b815197344ddc5d3"} err="failed to get container status \"078dcaec53b076208470e4aa0aeed5c90b43d5d9cd074682b815197344ddc5d3\": rpc error: code = NotFound desc = could not find container \"078dcaec53b076208470e4aa0aeed5c90b43d5d9cd074682b815197344ddc5d3\": container with ID starting with 078dcaec53b076208470e4aa0aeed5c90b43d5d9cd074682b815197344ddc5d3 not found: ID does not exist" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.723035 4911 scope.go:117] "RemoveContainer" containerID="2df43ff8753b5b894b648da9a7999a36ca42372fde74a34e009e677142de4b9f" Jun 06 09:56:47 crc kubenswrapper[4911]: E0606 09:56:47.723315 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2df43ff8753b5b894b648da9a7999a36ca42372fde74a34e009e677142de4b9f\": container with ID starting with 2df43ff8753b5b894b648da9a7999a36ca42372fde74a34e009e677142de4b9f not found: ID does not exist" containerID="2df43ff8753b5b894b648da9a7999a36ca42372fde74a34e009e677142de4b9f" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.723367 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2df43ff8753b5b894b648da9a7999a36ca42372fde74a34e009e677142de4b9f"} err="failed to get container status \"2df43ff8753b5b894b648da9a7999a36ca42372fde74a34e009e677142de4b9f\": rpc error: code = NotFound desc = could not find container \"2df43ff8753b5b894b648da9a7999a36ca42372fde74a34e009e677142de4b9f\": container with ID starting with 2df43ff8753b5b894b648da9a7999a36ca42372fde74a34e009e677142de4b9f not found: ID does not exist" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.723396 4911 scope.go:117] "RemoveContainer" containerID="6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.743461 4911 scope.go:117] "RemoveContainer" containerID="205910380b4e2640d7836341ee3a71b693bb44789c948b4195a09506c1e7f0af" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.803999 4911 scope.go:117] "RemoveContainer" containerID="8ecbec01d534cb176f7a970d52724adae675b6e94258bf5d4c18dd9c72c5215b" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.847084 4911 scope.go:117] "RemoveContainer" containerID="6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae" Jun 06 09:56:47 crc kubenswrapper[4911]: E0606 09:56:47.847641 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae\": container with ID starting with 6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae not found: ID does not exist" containerID="6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.847704 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae"} err="failed to get container status \"6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae\": rpc error: code = NotFound desc = could not find container 
\"6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae\": container with ID starting with 6f9a1ad7412b33d75e56478d04ba3a94dfc752a27ef4ca45e5281f6ade8db8ae not found: ID does not exist" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.847748 4911 scope.go:117] "RemoveContainer" containerID="205910380b4e2640d7836341ee3a71b693bb44789c948b4195a09506c1e7f0af" Jun 06 09:56:47 crc kubenswrapper[4911]: E0606 09:56:47.848073 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"205910380b4e2640d7836341ee3a71b693bb44789c948b4195a09506c1e7f0af\": container with ID starting with 205910380b4e2640d7836341ee3a71b693bb44789c948b4195a09506c1e7f0af not found: ID does not exist" containerID="205910380b4e2640d7836341ee3a71b693bb44789c948b4195a09506c1e7f0af" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.848118 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"205910380b4e2640d7836341ee3a71b693bb44789c948b4195a09506c1e7f0af"} err="failed to get container status \"205910380b4e2640d7836341ee3a71b693bb44789c948b4195a09506c1e7f0af\": rpc error: code = NotFound desc = could not find container \"205910380b4e2640d7836341ee3a71b693bb44789c948b4195a09506c1e7f0af\": container with ID starting with 205910380b4e2640d7836341ee3a71b693bb44789c948b4195a09506c1e7f0af not found: ID does not exist" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.848141 4911 scope.go:117] "RemoveContainer" containerID="8ecbec01d534cb176f7a970d52724adae675b6e94258bf5d4c18dd9c72c5215b" Jun 06 09:56:47 crc kubenswrapper[4911]: E0606 09:56:47.848424 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ecbec01d534cb176f7a970d52724adae675b6e94258bf5d4c18dd9c72c5215b\": container with ID starting with 8ecbec01d534cb176f7a970d52724adae675b6e94258bf5d4c18dd9c72c5215b not found: ID does not exist" containerID="8ecbec01d534cb176f7a970d52724adae675b6e94258bf5d4c18dd9c72c5215b" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.848454 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ecbec01d534cb176f7a970d52724adae675b6e94258bf5d4c18dd9c72c5215b"} err="failed to get container status \"8ecbec01d534cb176f7a970d52724adae675b6e94258bf5d4c18dd9c72c5215b\": rpc error: code = NotFound desc = could not find container \"8ecbec01d534cb176f7a970d52724adae675b6e94258bf5d4c18dd9c72c5215b\": container with ID starting with 8ecbec01d534cb176f7a970d52724adae675b6e94258bf5d4c18dd9c72c5215b not found: ID does not exist" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.958875 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a24c199-7a55-484c-9b82-88cb46f910f6" path="/var/lib/kubelet/pods/1a24c199-7a55-484c-9b82-88cb46f910f6/volumes" Jun 06 09:56:47 crc kubenswrapper[4911]: I0606 09:56:47.959641 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" path="/var/lib/kubelet/pods/5aa5de25-eb65-4fec-8ef8-5d0e1621c46b/volumes" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.792462 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-9gwlw"] Jun 06 09:57:01 crc kubenswrapper[4911]: E0606 09:57:01.793421 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" containerName="registry-server" Jun 06 09:57:01 crc 
kubenswrapper[4911]: I0606 09:57:01.793436 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" containerName="registry-server" Jun 06 09:57:01 crc kubenswrapper[4911]: E0606 09:57:01.793461 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" containerName="extract-utilities" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.793467 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" containerName="extract-utilities" Jun 06 09:57:01 crc kubenswrapper[4911]: E0606 09:57:01.793475 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a24c199-7a55-484c-9b82-88cb46f910f6" containerName="extract-utilities" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.793482 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a24c199-7a55-484c-9b82-88cb46f910f6" containerName="extract-utilities" Jun 06 09:57:01 crc kubenswrapper[4911]: E0606 09:57:01.793503 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a24c199-7a55-484c-9b82-88cb46f910f6" containerName="extract-content" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.793509 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a24c199-7a55-484c-9b82-88cb46f910f6" containerName="extract-content" Jun 06 09:57:01 crc kubenswrapper[4911]: E0606 09:57:01.793518 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a24c199-7a55-484c-9b82-88cb46f910f6" containerName="registry-server" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.793524 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a24c199-7a55-484c-9b82-88cb46f910f6" containerName="registry-server" Jun 06 09:57:01 crc kubenswrapper[4911]: E0606 09:57:01.793534 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" containerName="extract-content" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.793539 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" containerName="extract-content" Jun 06 09:57:01 crc kubenswrapper[4911]: E0606 09:57:01.793557 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc7deb8-a82d-4b0d-b3f9-200faf558eeb" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.793565 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="efc7deb8-a82d-4b0d-b3f9-200faf558eeb" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.793750 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aa5de25-eb65-4fec-8ef8-5d0e1621c46b" containerName="registry-server" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.793768 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="efc7deb8-a82d-4b0d-b3f9-200faf558eeb" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.793776 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a24c199-7a55-484c-9b82-88cb46f910f6" containerName="registry-server" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.794457 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-9gwlw" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.892051 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjlbw\" (UniqueName: \"kubernetes.io/projected/34e1bd84-2289-421a-b025-a52bcb4fd820-kube-api-access-sjlbw\") pod \"crc-debug-9gwlw\" (UID: \"34e1bd84-2289-421a-b025-a52bcb4fd820\") " pod="openstack/crc-debug-9gwlw" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.892222 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/34e1bd84-2289-421a-b025-a52bcb4fd820-host\") pod \"crc-debug-9gwlw\" (UID: \"34e1bd84-2289-421a-b025-a52bcb4fd820\") " pod="openstack/crc-debug-9gwlw" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.994795 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjlbw\" (UniqueName: \"kubernetes.io/projected/34e1bd84-2289-421a-b025-a52bcb4fd820-kube-api-access-sjlbw\") pod \"crc-debug-9gwlw\" (UID: \"34e1bd84-2289-421a-b025-a52bcb4fd820\") " pod="openstack/crc-debug-9gwlw" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.994862 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/34e1bd84-2289-421a-b025-a52bcb4fd820-host\") pod \"crc-debug-9gwlw\" (UID: \"34e1bd84-2289-421a-b025-a52bcb4fd820\") " pod="openstack/crc-debug-9gwlw" Jun 06 09:57:01 crc kubenswrapper[4911]: I0606 09:57:01.995122 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/34e1bd84-2289-421a-b025-a52bcb4fd820-host\") pod \"crc-debug-9gwlw\" (UID: \"34e1bd84-2289-421a-b025-a52bcb4fd820\") " pod="openstack/crc-debug-9gwlw" Jun 06 09:57:02 crc kubenswrapper[4911]: I0606 09:57:02.014753 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjlbw\" (UniqueName: \"kubernetes.io/projected/34e1bd84-2289-421a-b025-a52bcb4fd820-kube-api-access-sjlbw\") pod \"crc-debug-9gwlw\" (UID: \"34e1bd84-2289-421a-b025-a52bcb4fd820\") " pod="openstack/crc-debug-9gwlw" Jun 06 09:57:02 crc kubenswrapper[4911]: I0606 09:57:02.113551 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-9gwlw" Jun 06 09:57:02 crc kubenswrapper[4911]: I0606 09:57:02.887514 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-9gwlw" event={"ID":"34e1bd84-2289-421a-b025-a52bcb4fd820","Type":"ContainerStarted","Data":"c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf"} Jun 06 09:57:02 crc kubenswrapper[4911]: I0606 09:57:02.887833 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-9gwlw" event={"ID":"34e1bd84-2289-421a-b025-a52bcb4fd820","Type":"ContainerStarted","Data":"6ced128cc3ae3db98ca66667dbcc3a8aefd0d4850f88a3ec93a714aab2a033b1"} Jun 06 09:57:02 crc kubenswrapper[4911]: I0606 09:57:02.907658 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-9gwlw" podStartSLOduration=1.907634985 podStartE2EDuration="1.907634985s" podCreationTimestamp="2025-06-06 09:57:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:57:02.901656849 +0000 UTC m=+2634.177082392" watchObservedRunningTime="2025-06-06 09:57:02.907634985 +0000 UTC m=+2634.183060528" Jun 06 09:57:04 crc kubenswrapper[4911]: I0606 09:57:04.782320 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9nsqk"] Jun 06 09:57:04 crc kubenswrapper[4911]: I0606 09:57:04.785063 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:04 crc kubenswrapper[4911]: I0606 09:57:04.795555 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9nsqk"] Jun 06 09:57:04 crc kubenswrapper[4911]: I0606 09:57:04.953115 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgk6l\" (UniqueName: \"kubernetes.io/projected/88393b7e-da87-4e83-8189-4ae89c499b77-kube-api-access-pgk6l\") pod \"certified-operators-9nsqk\" (UID: \"88393b7e-da87-4e83-8189-4ae89c499b77\") " pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:04 crc kubenswrapper[4911]: I0606 09:57:04.953388 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88393b7e-da87-4e83-8189-4ae89c499b77-utilities\") pod \"certified-operators-9nsqk\" (UID: \"88393b7e-da87-4e83-8189-4ae89c499b77\") " pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:04 crc kubenswrapper[4911]: I0606 09:57:04.953432 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88393b7e-da87-4e83-8189-4ae89c499b77-catalog-content\") pod \"certified-operators-9nsqk\" (UID: \"88393b7e-da87-4e83-8189-4ae89c499b77\") " pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:05 crc kubenswrapper[4911]: I0606 09:57:05.056247 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88393b7e-da87-4e83-8189-4ae89c499b77-utilities\") pod \"certified-operators-9nsqk\" (UID: \"88393b7e-da87-4e83-8189-4ae89c499b77\") " pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:05 crc kubenswrapper[4911]: I0606 09:57:05.056348 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/88393b7e-da87-4e83-8189-4ae89c499b77-catalog-content\") pod \"certified-operators-9nsqk\" (UID: \"88393b7e-da87-4e83-8189-4ae89c499b77\") " pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:05 crc kubenswrapper[4911]: I0606 09:57:05.056581 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgk6l\" (UniqueName: \"kubernetes.io/projected/88393b7e-da87-4e83-8189-4ae89c499b77-kube-api-access-pgk6l\") pod \"certified-operators-9nsqk\" (UID: \"88393b7e-da87-4e83-8189-4ae89c499b77\") " pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:05 crc kubenswrapper[4911]: I0606 09:57:05.056998 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88393b7e-da87-4e83-8189-4ae89c499b77-utilities\") pod \"certified-operators-9nsqk\" (UID: \"88393b7e-da87-4e83-8189-4ae89c499b77\") " pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:05 crc kubenswrapper[4911]: I0606 09:57:05.057773 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88393b7e-da87-4e83-8189-4ae89c499b77-catalog-content\") pod \"certified-operators-9nsqk\" (UID: \"88393b7e-da87-4e83-8189-4ae89c499b77\") " pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:05 crc kubenswrapper[4911]: I0606 09:57:05.085055 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgk6l\" (UniqueName: \"kubernetes.io/projected/88393b7e-da87-4e83-8189-4ae89c499b77-kube-api-access-pgk6l\") pod \"certified-operators-9nsqk\" (UID: \"88393b7e-da87-4e83-8189-4ae89c499b77\") " pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:05 crc kubenswrapper[4911]: I0606 09:57:05.107029 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:05 crc kubenswrapper[4911]: I0606 09:57:05.639888 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9nsqk"] Jun 06 09:57:05 crc kubenswrapper[4911]: W0606 09:57:05.646252 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88393b7e_da87_4e83_8189_4ae89c499b77.slice/crio-4118822b87735b3171e1dd4fa96b59a1956c0da35d34072bae58352e7bb1047e WatchSource:0}: Error finding container 4118822b87735b3171e1dd4fa96b59a1956c0da35d34072bae58352e7bb1047e: Status 404 returned error can't find the container with id 4118822b87735b3171e1dd4fa96b59a1956c0da35d34072bae58352e7bb1047e Jun 06 09:57:05 crc kubenswrapper[4911]: I0606 09:57:05.916657 4911 generic.go:334] "Generic (PLEG): container finished" podID="88393b7e-da87-4e83-8189-4ae89c499b77" containerID="33d5911813299b24c493fcd82fc2a0fa600af9cce9764d253f911a2f8283e306" exitCode=0 Jun 06 09:57:05 crc kubenswrapper[4911]: I0606 09:57:05.917387 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9nsqk" event={"ID":"88393b7e-da87-4e83-8189-4ae89c499b77","Type":"ContainerDied","Data":"33d5911813299b24c493fcd82fc2a0fa600af9cce9764d253f911a2f8283e306"} Jun 06 09:57:05 crc kubenswrapper[4911]: I0606 09:57:05.917426 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9nsqk" event={"ID":"88393b7e-da87-4e83-8189-4ae89c499b77","Type":"ContainerStarted","Data":"4118822b87735b3171e1dd4fa96b59a1956c0da35d34072bae58352e7bb1047e"} Jun 06 09:57:08 crc kubenswrapper[4911]: I0606 09:57:08.950950 4911 generic.go:334] "Generic (PLEG): container finished" podID="88393b7e-da87-4e83-8189-4ae89c499b77" containerID="4463c9a06d3bfbefde5c26b84e2c90577553d66735dcc71e896e034d06ee9151" exitCode=0 Jun 06 09:57:08 crc kubenswrapper[4911]: I0606 09:57:08.951060 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9nsqk" event={"ID":"88393b7e-da87-4e83-8189-4ae89c499b77","Type":"ContainerDied","Data":"4463c9a06d3bfbefde5c26b84e2c90577553d66735dcc71e896e034d06ee9151"} Jun 06 09:57:09 crc kubenswrapper[4911]: I0606 09:57:09.967555 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9nsqk" event={"ID":"88393b7e-da87-4e83-8189-4ae89c499b77","Type":"ContainerStarted","Data":"e0a1db37ff70483e23971e534e04f45faebf384dd690d61f7fe1490da301b77c"} Jun 06 09:57:09 crc kubenswrapper[4911]: I0606 09:57:09.996366 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9nsqk" podStartSLOduration=2.512759932 podStartE2EDuration="5.996341815s" podCreationTimestamp="2025-06-06 09:57:04 +0000 UTC" firstStartedPulling="2025-06-06 09:57:05.918660764 +0000 UTC m=+2637.194086307" lastFinishedPulling="2025-06-06 09:57:09.402242647 +0000 UTC m=+2640.677668190" observedRunningTime="2025-06-06 09:57:09.987156896 +0000 UTC m=+2641.262582459" watchObservedRunningTime="2025-06-06 09:57:09.996341815 +0000 UTC m=+2641.271767358" Jun 06 09:57:12 crc kubenswrapper[4911]: I0606 09:57:12.685463 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-9gwlw"] Jun 06 09:57:12 crc kubenswrapper[4911]: I0606 09:57:12.686009 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-9gwlw" 
podUID="34e1bd84-2289-421a-b025-a52bcb4fd820" containerName="container-00" containerID="cri-o://c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf" gracePeriod=2 Jun 06 09:57:12 crc kubenswrapper[4911]: I0606 09:57:12.696209 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-9gwlw"] Jun 06 09:57:12 crc kubenswrapper[4911]: I0606 09:57:12.797321 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-9gwlw" Jun 06 09:57:12 crc kubenswrapper[4911]: I0606 09:57:12.916369 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjlbw\" (UniqueName: \"kubernetes.io/projected/34e1bd84-2289-421a-b025-a52bcb4fd820-kube-api-access-sjlbw\") pod \"34e1bd84-2289-421a-b025-a52bcb4fd820\" (UID: \"34e1bd84-2289-421a-b025-a52bcb4fd820\") " Jun 06 09:57:12 crc kubenswrapper[4911]: I0606 09:57:12.916883 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/34e1bd84-2289-421a-b025-a52bcb4fd820-host\") pod \"34e1bd84-2289-421a-b025-a52bcb4fd820\" (UID: \"34e1bd84-2289-421a-b025-a52bcb4fd820\") " Jun 06 09:57:12 crc kubenswrapper[4911]: I0606 09:57:12.916971 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/34e1bd84-2289-421a-b025-a52bcb4fd820-host" (OuterVolumeSpecName: "host") pod "34e1bd84-2289-421a-b025-a52bcb4fd820" (UID: "34e1bd84-2289-421a-b025-a52bcb4fd820"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:57:12 crc kubenswrapper[4911]: I0606 09:57:12.917608 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/34e1bd84-2289-421a-b025-a52bcb4fd820-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:57:12 crc kubenswrapper[4911]: I0606 09:57:12.923148 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34e1bd84-2289-421a-b025-a52bcb4fd820-kube-api-access-sjlbw" (OuterVolumeSpecName: "kube-api-access-sjlbw") pod "34e1bd84-2289-421a-b025-a52bcb4fd820" (UID: "34e1bd84-2289-421a-b025-a52bcb4fd820"). InnerVolumeSpecName "kube-api-access-sjlbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:57:13 crc kubenswrapper[4911]: I0606 09:57:13.013355 4911 generic.go:334] "Generic (PLEG): container finished" podID="34e1bd84-2289-421a-b025-a52bcb4fd820" containerID="c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf" exitCode=0 Jun 06 09:57:13 crc kubenswrapper[4911]: I0606 09:57:13.013443 4911 scope.go:117] "RemoveContainer" containerID="c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf" Jun 06 09:57:13 crc kubenswrapper[4911]: I0606 09:57:13.013896 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-9gwlw" Jun 06 09:57:13 crc kubenswrapper[4911]: I0606 09:57:13.021833 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjlbw\" (UniqueName: \"kubernetes.io/projected/34e1bd84-2289-421a-b025-a52bcb4fd820-kube-api-access-sjlbw\") on node \"crc\" DevicePath \"\"" Jun 06 09:57:13 crc kubenswrapper[4911]: I0606 09:57:13.039043 4911 scope.go:117] "RemoveContainer" containerID="c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf" Jun 06 09:57:13 crc kubenswrapper[4911]: E0606 09:57:13.039722 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf\": container with ID starting with c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf not found: ID does not exist" containerID="c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf" Jun 06 09:57:13 crc kubenswrapper[4911]: I0606 09:57:13.039952 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf"} err="failed to get container status \"c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf\": rpc error: code = NotFound desc = could not find container \"c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf\": container with ID starting with c593cb71d63f1894498c96751143d7a0f35e99c9d5c0cc938118488d68a79caf not found: ID does not exist" Jun 06 09:57:13 crc kubenswrapper[4911]: I0606 09:57:13.959289 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34e1bd84-2289-421a-b025-a52bcb4fd820" path="/var/lib/kubelet/pods/34e1bd84-2289-421a-b025-a52bcb4fd820/volumes" Jun 06 09:57:15 crc kubenswrapper[4911]: I0606 09:57:15.107467 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:15 crc kubenswrapper[4911]: I0606 09:57:15.107803 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:15 crc kubenswrapper[4911]: I0606 09:57:15.155334 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:16 crc kubenswrapper[4911]: I0606 09:57:16.115027 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9nsqk" Jun 06 09:57:16 crc kubenswrapper[4911]: I0606 09:57:16.181486 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9nsqk"] Jun 06 09:57:16 crc kubenswrapper[4911]: I0606 09:57:16.228856 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g6x48"] Jun 06 09:57:16 crc kubenswrapper[4911]: I0606 09:57:16.229305 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g6x48" podUID="c0f6021c-b179-459f-9b3f-901c36c58d5f" containerName="registry-server" containerID="cri-o://4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838" gracePeriod=2 Jun 06 09:57:16 crc kubenswrapper[4911]: I0606 09:57:16.876181 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.011414 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-utilities\") pod \"c0f6021c-b179-459f-9b3f-901c36c58d5f\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.011491 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-catalog-content\") pod \"c0f6021c-b179-459f-9b3f-901c36c58d5f\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.011734 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx7bz\" (UniqueName: \"kubernetes.io/projected/c0f6021c-b179-459f-9b3f-901c36c58d5f-kube-api-access-jx7bz\") pod \"c0f6021c-b179-459f-9b3f-901c36c58d5f\" (UID: \"c0f6021c-b179-459f-9b3f-901c36c58d5f\") " Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.013958 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-utilities" (OuterVolumeSpecName: "utilities") pod "c0f6021c-b179-459f-9b3f-901c36c58d5f" (UID: "c0f6021c-b179-459f-9b3f-901c36c58d5f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.019375 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0f6021c-b179-459f-9b3f-901c36c58d5f-kube-api-access-jx7bz" (OuterVolumeSpecName: "kube-api-access-jx7bz") pod "c0f6021c-b179-459f-9b3f-901c36c58d5f" (UID: "c0f6021c-b179-459f-9b3f-901c36c58d5f"). InnerVolumeSpecName "kube-api-access-jx7bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.047052 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0f6021c-b179-459f-9b3f-901c36c58d5f" (UID: "c0f6021c-b179-459f-9b3f-901c36c58d5f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.065433 4911 generic.go:334] "Generic (PLEG): container finished" podID="c0f6021c-b179-459f-9b3f-901c36c58d5f" containerID="4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838" exitCode=0 Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.065829 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6x48" event={"ID":"c0f6021c-b179-459f-9b3f-901c36c58d5f","Type":"ContainerDied","Data":"4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838"} Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.065876 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g6x48" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.065888 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6x48" event={"ID":"c0f6021c-b179-459f-9b3f-901c36c58d5f","Type":"ContainerDied","Data":"c2e1c91cb20606b4428879708f6253513b361751d789752de11aacad4eac6db1"} Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.065984 4911 scope.go:117] "RemoveContainer" containerID="4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.100792 4911 scope.go:117] "RemoveContainer" containerID="ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.110683 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g6x48"] Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.113841 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.114074 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0f6021c-b179-459f-9b3f-901c36c58d5f-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.114182 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx7bz\" (UniqueName: \"kubernetes.io/projected/c0f6021c-b179-459f-9b3f-901c36c58d5f-kube-api-access-jx7bz\") on node \"crc\" DevicePath \"\"" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.153988 4911 scope.go:117] "RemoveContainer" containerID="4a683cc8cf6cf5277812ad5805635ea76caf7971e3d14210cba88131911df454" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.159567 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g6x48"] Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.179842 4911 scope.go:117] "RemoveContainer" containerID="4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838" Jun 06 09:57:17 crc kubenswrapper[4911]: E0606 09:57:17.181000 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838\": container with ID starting with 4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838 not found: ID does not exist" containerID="4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.181066 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838"} err="failed to get container status \"4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838\": rpc error: code = NotFound desc = could not find container \"4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838\": container with ID starting with 4b6bf814f794ccb086127334de83427c167ed78e1013d26b3c077591cb2e5838 not found: ID does not exist" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.181114 4911 scope.go:117] "RemoveContainer" containerID="ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689" Jun 06 09:57:17 crc kubenswrapper[4911]: 
E0606 09:57:17.181588 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689\": container with ID starting with ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689 not found: ID does not exist" containerID="ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.181620 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689"} err="failed to get container status \"ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689\": rpc error: code = NotFound desc = could not find container \"ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689\": container with ID starting with ceed939c6964756fcb3ae5b1d2f68154a37a05600dcde71a977d41bb0f276689 not found: ID does not exist" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.181646 4911 scope.go:117] "RemoveContainer" containerID="4a683cc8cf6cf5277812ad5805635ea76caf7971e3d14210cba88131911df454" Jun 06 09:57:17 crc kubenswrapper[4911]: E0606 09:57:17.182009 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a683cc8cf6cf5277812ad5805635ea76caf7971e3d14210cba88131911df454\": container with ID starting with 4a683cc8cf6cf5277812ad5805635ea76caf7971e3d14210cba88131911df454 not found: ID does not exist" containerID="4a683cc8cf6cf5277812ad5805635ea76caf7971e3d14210cba88131911df454" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.182081 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a683cc8cf6cf5277812ad5805635ea76caf7971e3d14210cba88131911df454"} err="failed to get container status \"4a683cc8cf6cf5277812ad5805635ea76caf7971e3d14210cba88131911df454\": rpc error: code = NotFound desc = could not find container \"4a683cc8cf6cf5277812ad5805635ea76caf7971e3d14210cba88131911df454\": container with ID starting with 4a683cc8cf6cf5277812ad5805635ea76caf7971e3d14210cba88131911df454 not found: ID does not exist" Jun 06 09:57:17 crc kubenswrapper[4911]: I0606 09:57:17.959970 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0f6021c-b179-459f-9b3f-901c36c58d5f" path="/var/lib/kubelet/pods/c0f6021c-b179-459f-9b3f-901c36c58d5f/volumes" Jun 06 09:57:24 crc kubenswrapper[4911]: I0606 09:57:24.299875 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:57:24 crc kubenswrapper[4911]: I0606 09:57:24.300351 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.166928 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Jun 06 09:57:37 crc kubenswrapper[4911]: E0606 09:57:37.171016 4911 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c0f6021c-b179-459f-9b3f-901c36c58d5f" containerName="extract-utilities" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.171201 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0f6021c-b179-459f-9b3f-901c36c58d5f" containerName="extract-utilities" Jun 06 09:57:37 crc kubenswrapper[4911]: E0606 09:57:37.171294 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0f6021c-b179-459f-9b3f-901c36c58d5f" containerName="extract-content" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.171359 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0f6021c-b179-459f-9b3f-901c36c58d5f" containerName="extract-content" Jun 06 09:57:37 crc kubenswrapper[4911]: E0606 09:57:37.171434 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34e1bd84-2289-421a-b025-a52bcb4fd820" containerName="container-00" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.171491 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="34e1bd84-2289-421a-b025-a52bcb4fd820" containerName="container-00" Jun 06 09:57:37 crc kubenswrapper[4911]: E0606 09:57:37.171584 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0f6021c-b179-459f-9b3f-901c36c58d5f" containerName="registry-server" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.171666 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0f6021c-b179-459f-9b3f-901c36c58d5f" containerName="registry-server" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.171933 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="34e1bd84-2289-421a-b025-a52bcb4fd820" containerName="container-00" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.172020 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0f6021c-b179-459f-9b3f-901c36c58d5f" containerName="registry-server" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.172777 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.176570 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.176634 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.176730 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.182422 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.224297 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/9a6fdc52-110b-4573-92b5-57f19994ab56-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.224347 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9a6fdc52-110b-4573-92b5-57f19994ab56-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.224367 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.224391 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9a6fdc52-110b-4573-92b5-57f19994ab56-config-data\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.224422 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9a6fdc52-110b-4573-92b5-57f19994ab56-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.224483 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/9a6fdc52-110b-4573-92b5-57f19994ab56-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.224632 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/9a6fdc52-110b-4573-92b5-57f19994ab56-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " 
pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.224763 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jdb5\" (UniqueName: \"kubernetes.io/projected/9a6fdc52-110b-4573-92b5-57f19994ab56-kube-api-access-5jdb5\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.224796 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a6fdc52-110b-4573-92b5-57f19994ab56-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.326569 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/9a6fdc52-110b-4573-92b5-57f19994ab56-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.326968 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/9a6fdc52-110b-4573-92b5-57f19994ab56-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.327151 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jdb5\" (UniqueName: \"kubernetes.io/projected/9a6fdc52-110b-4573-92b5-57f19994ab56-kube-api-access-5jdb5\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.327447 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a6fdc52-110b-4573-92b5-57f19994ab56-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.327541 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/9a6fdc52-110b-4573-92b5-57f19994ab56-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.327969 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/9a6fdc52-110b-4573-92b5-57f19994ab56-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.328134 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9a6fdc52-110b-4573-92b5-57f19994ab56-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 
09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.328266 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.328392 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9a6fdc52-110b-4573-92b5-57f19994ab56-config-data\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.328521 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9a6fdc52-110b-4573-92b5-57f19994ab56-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.328395 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/9a6fdc52-110b-4573-92b5-57f19994ab56-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.328607 4911 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.329673 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9a6fdc52-110b-4573-92b5-57f19994ab56-config-data\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.329780 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/9a6fdc52-110b-4573-92b5-57f19994ab56-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.333290 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/9a6fdc52-110b-4573-92b5-57f19994ab56-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.333995 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/9a6fdc52-110b-4573-92b5-57f19994ab56-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.334206 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/9a6fdc52-110b-4573-92b5-57f19994ab56-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.359805 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.359997 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jdb5\" (UniqueName: \"kubernetes.io/projected/9a6fdc52-110b-4573-92b5-57f19994ab56-kube-api-access-5jdb5\") pod \"tempest-tests-tempest\" (UID: \"9a6fdc52-110b-4573-92b5-57f19994ab56\") " pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.501536 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Jun 06 09:57:37 crc kubenswrapper[4911]: I0606 09:57:37.937593 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Jun 06 09:57:38 crc kubenswrapper[4911]: I0606 09:57:38.277181 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"9a6fdc52-110b-4573-92b5-57f19994ab56","Type":"ContainerStarted","Data":"902e70d65d0d4718f1b58fa6529b12a9bd2d12b6faf870c6c4899d86b82da67a"} Jun 06 09:57:54 crc kubenswrapper[4911]: I0606 09:57:54.301006 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:57:54 crc kubenswrapper[4911]: I0606 09:57:54.301819 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:58:02 crc kubenswrapper[4911]: I0606 09:58:02.070737 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-l5z9x"] Jun 06 09:58:02 crc kubenswrapper[4911]: I0606 09:58:02.072824 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-l5z9x" Jun 06 09:58:02 crc kubenswrapper[4911]: I0606 09:58:02.191330 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtchf\" (UniqueName: \"kubernetes.io/projected/405a9491-e37a-4bb2-b3a8-2faf924758f9-kube-api-access-mtchf\") pod \"crc-debug-l5z9x\" (UID: \"405a9491-e37a-4bb2-b3a8-2faf924758f9\") " pod="openstack/crc-debug-l5z9x" Jun 06 09:58:02 crc kubenswrapper[4911]: I0606 09:58:02.191944 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/405a9491-e37a-4bb2-b3a8-2faf924758f9-host\") pod \"crc-debug-l5z9x\" (UID: \"405a9491-e37a-4bb2-b3a8-2faf924758f9\") " pod="openstack/crc-debug-l5z9x" Jun 06 09:58:02 crc kubenswrapper[4911]: I0606 09:58:02.293827 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/405a9491-e37a-4bb2-b3a8-2faf924758f9-host\") pod \"crc-debug-l5z9x\" (UID: \"405a9491-e37a-4bb2-b3a8-2faf924758f9\") " pod="openstack/crc-debug-l5z9x" Jun 06 09:58:02 crc kubenswrapper[4911]: I0606 09:58:02.293988 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/405a9491-e37a-4bb2-b3a8-2faf924758f9-host\") pod \"crc-debug-l5z9x\" (UID: \"405a9491-e37a-4bb2-b3a8-2faf924758f9\") " pod="openstack/crc-debug-l5z9x" Jun 06 09:58:02 crc kubenswrapper[4911]: I0606 09:58:02.294064 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtchf\" (UniqueName: \"kubernetes.io/projected/405a9491-e37a-4bb2-b3a8-2faf924758f9-kube-api-access-mtchf\") pod \"crc-debug-l5z9x\" (UID: \"405a9491-e37a-4bb2-b3a8-2faf924758f9\") " pod="openstack/crc-debug-l5z9x" Jun 06 09:58:02 crc kubenswrapper[4911]: I0606 09:58:02.321603 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtchf\" (UniqueName: \"kubernetes.io/projected/405a9491-e37a-4bb2-b3a8-2faf924758f9-kube-api-access-mtchf\") pod \"crc-debug-l5z9x\" (UID: \"405a9491-e37a-4bb2-b3a8-2faf924758f9\") " pod="openstack/crc-debug-l5z9x" Jun 06 09:58:02 crc kubenswrapper[4911]: I0606 09:58:02.406844 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-l5z9x" Jun 06 09:58:17 crc kubenswrapper[4911]: W0606 09:58:17.413852 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod405a9491_e37a_4bb2_b3a8_2faf924758f9.slice/crio-9e78feb07ce778cf8c03a35d39a103aef7cbbe3938768403c08b9aad50ed1e19 WatchSource:0}: Error finding container 9e78feb07ce778cf8c03a35d39a103aef7cbbe3938768403c08b9aad50ed1e19: Status 404 returned error can't find the container with id 9e78feb07ce778cf8c03a35d39a103aef7cbbe3938768403c08b9aad50ed1e19 Jun 06 09:58:17 crc kubenswrapper[4911]: E0606 09:58:17.554401 4911 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Jun 06 09:58:17 crc kubenswrapper[4911]: E0606 09:58:17.554610 4911 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5jdb5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnv
Source{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(9a6fdc52-110b-4573-92b5-57f19994ab56): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jun 06 09:58:17 crc kubenswrapper[4911]: E0606 09:58:17.555818 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="9a6fdc52-110b-4573-92b5-57f19994ab56" Jun 06 09:58:17 crc kubenswrapper[4911]: I0606 09:58:17.667596 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-l5z9x" event={"ID":"405a9491-e37a-4bb2-b3a8-2faf924758f9","Type":"ContainerStarted","Data":"6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a"} Jun 06 09:58:17 crc kubenswrapper[4911]: I0606 09:58:17.668042 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-l5z9x" event={"ID":"405a9491-e37a-4bb2-b3a8-2faf924758f9","Type":"ContainerStarted","Data":"9e78feb07ce778cf8c03a35d39a103aef7cbbe3938768403c08b9aad50ed1e19"} Jun 06 09:58:17 crc kubenswrapper[4911]: E0606 09:58:17.669541 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="9a6fdc52-110b-4573-92b5-57f19994ab56" Jun 06 09:58:17 crc kubenswrapper[4911]: I0606 09:58:17.706256 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-l5z9x" podStartSLOduration=15.706231191 podStartE2EDuration="15.706231191s" podCreationTimestamp="2025-06-06 09:58:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:58:17.700860021 +0000 UTC m=+2708.976285584" watchObservedRunningTime="2025-06-06 09:58:17.706231191 +0000 UTC m=+2708.981656734" Jun 06 09:58:24 crc kubenswrapper[4911]: I0606 09:58:24.301177 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 09:58:24 crc kubenswrapper[4911]: I0606 09:58:24.302212 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 09:58:24 crc kubenswrapper[4911]: I0606 09:58:24.302267 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 09:58:24 crc 
kubenswrapper[4911]: I0606 09:58:24.303005 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a26b21cd2dacce99add5f3271857622702e798058ab60957fc172d6f9b03523a"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 09:58:24 crc kubenswrapper[4911]: I0606 09:58:24.303060 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://a26b21cd2dacce99add5f3271857622702e798058ab60957fc172d6f9b03523a" gracePeriod=600 Jun 06 09:58:24 crc kubenswrapper[4911]: I0606 09:58:24.741226 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="a26b21cd2dacce99add5f3271857622702e798058ab60957fc172d6f9b03523a" exitCode=0 Jun 06 09:58:24 crc kubenswrapper[4911]: I0606 09:58:24.741303 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"a26b21cd2dacce99add5f3271857622702e798058ab60957fc172d6f9b03523a"} Jun 06 09:58:24 crc kubenswrapper[4911]: I0606 09:58:24.741696 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac"} Jun 06 09:58:24 crc kubenswrapper[4911]: I0606 09:58:24.741727 4911 scope.go:117] "RemoveContainer" containerID="be64d24391a4e4ca051a90e8b2666e9cd876030cde8771af35f6c9943814cf81" Jun 06 09:58:27 crc kubenswrapper[4911]: I0606 09:58:27.983583 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-l5z9x"] Jun 06 09:58:27 crc kubenswrapper[4911]: I0606 09:58:27.984617 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-l5z9x" podUID="405a9491-e37a-4bb2-b3a8-2faf924758f9" containerName="container-00" containerID="cri-o://6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a" gracePeriod=2 Jun 06 09:58:27 crc kubenswrapper[4911]: I0606 09:58:27.995508 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-l5z9x"] Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.088706 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-l5z9x" Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.248835 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtchf\" (UniqueName: \"kubernetes.io/projected/405a9491-e37a-4bb2-b3a8-2faf924758f9-kube-api-access-mtchf\") pod \"405a9491-e37a-4bb2-b3a8-2faf924758f9\" (UID: \"405a9491-e37a-4bb2-b3a8-2faf924758f9\") " Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.248887 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/405a9491-e37a-4bb2-b3a8-2faf924758f9-host\") pod \"405a9491-e37a-4bb2-b3a8-2faf924758f9\" (UID: \"405a9491-e37a-4bb2-b3a8-2faf924758f9\") " Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.249076 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/405a9491-e37a-4bb2-b3a8-2faf924758f9-host" (OuterVolumeSpecName: "host") pod "405a9491-e37a-4bb2-b3a8-2faf924758f9" (UID: "405a9491-e37a-4bb2-b3a8-2faf924758f9"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.249747 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/405a9491-e37a-4bb2-b3a8-2faf924758f9-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.257331 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/405a9491-e37a-4bb2-b3a8-2faf924758f9-kube-api-access-mtchf" (OuterVolumeSpecName: "kube-api-access-mtchf") pod "405a9491-e37a-4bb2-b3a8-2faf924758f9" (UID: "405a9491-e37a-4bb2-b3a8-2faf924758f9"). InnerVolumeSpecName "kube-api-access-mtchf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.351548 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtchf\" (UniqueName: \"kubernetes.io/projected/405a9491-e37a-4bb2-b3a8-2faf924758f9-kube-api-access-mtchf\") on node \"crc\" DevicePath \"\"" Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.790157 4911 generic.go:334] "Generic (PLEG): container finished" podID="405a9491-e37a-4bb2-b3a8-2faf924758f9" containerID="6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a" exitCode=0 Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.790257 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-l5z9x" Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.790311 4911 scope.go:117] "RemoveContainer" containerID="6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a" Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.825706 4911 scope.go:117] "RemoveContainer" containerID="6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a" Jun 06 09:58:28 crc kubenswrapper[4911]: E0606 09:58:28.826433 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a\": container with ID starting with 6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a not found: ID does not exist" containerID="6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a" Jun 06 09:58:28 crc kubenswrapper[4911]: I0606 09:58:28.826482 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a"} err="failed to get container status \"6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a\": rpc error: code = NotFound desc = could not find container \"6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a\": container with ID starting with 6138225ea455641dbafce6d6111669b0cc77d7c3b994e9d84f7e1919075b866a not found: ID does not exist" Jun 06 09:58:29 crc kubenswrapper[4911]: I0606 09:58:29.724801 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Jun 06 09:58:29 crc kubenswrapper[4911]: I0606 09:58:29.958469 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="405a9491-e37a-4bb2-b3a8-2faf924758f9" path="/var/lib/kubelet/pods/405a9491-e37a-4bb2-b3a8-2faf924758f9/volumes" Jun 06 09:58:30 crc kubenswrapper[4911]: I0606 09:58:30.813896 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"9a6fdc52-110b-4573-92b5-57f19994ab56","Type":"ContainerStarted","Data":"5f708822e32732e21d934fe0d4a2b2fa0f01afa6a0607bd77fce4a155333b6e2"} Jun 06 09:58:30 crc kubenswrapper[4911]: I0606 09:58:30.834387 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.06085279 podStartE2EDuration="54.83436573s" podCreationTimestamp="2025-06-06 09:57:36 +0000 UTC" firstStartedPulling="2025-06-06 09:57:37.947905528 +0000 UTC m=+2669.223331071" lastFinishedPulling="2025-06-06 09:58:29.721418468 +0000 UTC m=+2720.996844011" observedRunningTime="2025-06-06 09:58:30.831313641 +0000 UTC m=+2722.106739204" watchObservedRunningTime="2025-06-06 09:58:30.83436573 +0000 UTC m=+2722.109791273" Jun 06 09:59:02 crc kubenswrapper[4911]: I0606 09:59:02.448729 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-2ln4f"] Jun 06 09:59:02 crc kubenswrapper[4911]: E0606 09:59:02.450032 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="405a9491-e37a-4bb2-b3a8-2faf924758f9" containerName="container-00" Jun 06 09:59:02 crc kubenswrapper[4911]: I0606 09:59:02.450057 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="405a9491-e37a-4bb2-b3a8-2faf924758f9" containerName="container-00" Jun 06 09:59:02 crc kubenswrapper[4911]: I0606 09:59:02.450306 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="405a9491-e37a-4bb2-b3a8-2faf924758f9" 
containerName="container-00" Jun 06 09:59:02 crc kubenswrapper[4911]: I0606 09:59:02.451132 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-2ln4f" Jun 06 09:59:02 crc kubenswrapper[4911]: I0606 09:59:02.544945 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/40bacced-fd62-47b4-8db0-16eda3506911-host\") pod \"crc-debug-2ln4f\" (UID: \"40bacced-fd62-47b4-8db0-16eda3506911\") " pod="openstack/crc-debug-2ln4f" Jun 06 09:59:02 crc kubenswrapper[4911]: I0606 09:59:02.545008 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcg2k\" (UniqueName: \"kubernetes.io/projected/40bacced-fd62-47b4-8db0-16eda3506911-kube-api-access-gcg2k\") pod \"crc-debug-2ln4f\" (UID: \"40bacced-fd62-47b4-8db0-16eda3506911\") " pod="openstack/crc-debug-2ln4f" Jun 06 09:59:02 crc kubenswrapper[4911]: I0606 09:59:02.646945 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/40bacced-fd62-47b4-8db0-16eda3506911-host\") pod \"crc-debug-2ln4f\" (UID: \"40bacced-fd62-47b4-8db0-16eda3506911\") " pod="openstack/crc-debug-2ln4f" Jun 06 09:59:02 crc kubenswrapper[4911]: I0606 09:59:02.647000 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcg2k\" (UniqueName: \"kubernetes.io/projected/40bacced-fd62-47b4-8db0-16eda3506911-kube-api-access-gcg2k\") pod \"crc-debug-2ln4f\" (UID: \"40bacced-fd62-47b4-8db0-16eda3506911\") " pod="openstack/crc-debug-2ln4f" Jun 06 09:59:02 crc kubenswrapper[4911]: I0606 09:59:02.647076 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/40bacced-fd62-47b4-8db0-16eda3506911-host\") pod \"crc-debug-2ln4f\" (UID: \"40bacced-fd62-47b4-8db0-16eda3506911\") " pod="openstack/crc-debug-2ln4f" Jun 06 09:59:02 crc kubenswrapper[4911]: I0606 09:59:02.668029 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcg2k\" (UniqueName: \"kubernetes.io/projected/40bacced-fd62-47b4-8db0-16eda3506911-kube-api-access-gcg2k\") pod \"crc-debug-2ln4f\" (UID: \"40bacced-fd62-47b4-8db0-16eda3506911\") " pod="openstack/crc-debug-2ln4f" Jun 06 09:59:02 crc kubenswrapper[4911]: I0606 09:59:02.773272 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2ln4f" Jun 06 09:59:03 crc kubenswrapper[4911]: I0606 09:59:03.129896 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-2ln4f" event={"ID":"40bacced-fd62-47b4-8db0-16eda3506911","Type":"ContainerStarted","Data":"56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac"} Jun 06 09:59:03 crc kubenswrapper[4911]: I0606 09:59:03.130368 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-2ln4f" event={"ID":"40bacced-fd62-47b4-8db0-16eda3506911","Type":"ContainerStarted","Data":"ecba63a6f3fbacc6855036b368029432b8888fbc32cda925de7637d0fa8b0ac8"} Jun 06 09:59:03 crc kubenswrapper[4911]: I0606 09:59:03.152240 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-2ln4f" podStartSLOduration=1.152220055 podStartE2EDuration="1.152220055s" podCreationTimestamp="2025-06-06 09:59:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 09:59:03.144388571 +0000 UTC m=+2754.419814134" watchObservedRunningTime="2025-06-06 09:59:03.152220055 +0000 UTC m=+2754.427645598" Jun 06 09:59:13 crc kubenswrapper[4911]: I0606 09:59:13.420057 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-2ln4f"] Jun 06 09:59:13 crc kubenswrapper[4911]: I0606 09:59:13.421179 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-2ln4f" podUID="40bacced-fd62-47b4-8db0-16eda3506911" containerName="container-00" containerID="cri-o://56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac" gracePeriod=2 Jun 06 09:59:13 crc kubenswrapper[4911]: I0606 09:59:13.431621 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-2ln4f"] Jun 06 09:59:13 crc kubenswrapper[4911]: I0606 09:59:13.538201 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-2ln4f" Jun 06 09:59:13 crc kubenswrapper[4911]: I0606 09:59:13.679564 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/40bacced-fd62-47b4-8db0-16eda3506911-host\") pod \"40bacced-fd62-47b4-8db0-16eda3506911\" (UID: \"40bacced-fd62-47b4-8db0-16eda3506911\") " Jun 06 09:59:13 crc kubenswrapper[4911]: I0606 09:59:13.679762 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcg2k\" (UniqueName: \"kubernetes.io/projected/40bacced-fd62-47b4-8db0-16eda3506911-kube-api-access-gcg2k\") pod \"40bacced-fd62-47b4-8db0-16eda3506911\" (UID: \"40bacced-fd62-47b4-8db0-16eda3506911\") " Jun 06 09:59:13 crc kubenswrapper[4911]: I0606 09:59:13.681003 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40bacced-fd62-47b4-8db0-16eda3506911-host" (OuterVolumeSpecName: "host") pod "40bacced-fd62-47b4-8db0-16eda3506911" (UID: "40bacced-fd62-47b4-8db0-16eda3506911"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 09:59:13 crc kubenswrapper[4911]: I0606 09:59:13.688360 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40bacced-fd62-47b4-8db0-16eda3506911-kube-api-access-gcg2k" (OuterVolumeSpecName: "kube-api-access-gcg2k") pod "40bacced-fd62-47b4-8db0-16eda3506911" (UID: "40bacced-fd62-47b4-8db0-16eda3506911"). 
InnerVolumeSpecName "kube-api-access-gcg2k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 09:59:13 crc kubenswrapper[4911]: I0606 09:59:13.783140 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcg2k\" (UniqueName: \"kubernetes.io/projected/40bacced-fd62-47b4-8db0-16eda3506911-kube-api-access-gcg2k\") on node \"crc\" DevicePath \"\"" Jun 06 09:59:13 crc kubenswrapper[4911]: I0606 09:59:13.783192 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/40bacced-fd62-47b4-8db0-16eda3506911-host\") on node \"crc\" DevicePath \"\"" Jun 06 09:59:13 crc kubenswrapper[4911]: I0606 09:59:13.964312 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40bacced-fd62-47b4-8db0-16eda3506911" path="/var/lib/kubelet/pods/40bacced-fd62-47b4-8db0-16eda3506911/volumes" Jun 06 09:59:14 crc kubenswrapper[4911]: I0606 09:59:14.247910 4911 generic.go:334] "Generic (PLEG): container finished" podID="40bacced-fd62-47b4-8db0-16eda3506911" containerID="56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac" exitCode=0 Jun 06 09:59:14 crc kubenswrapper[4911]: I0606 09:59:14.248011 4911 scope.go:117] "RemoveContainer" containerID="56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac" Jun 06 09:59:14 crc kubenswrapper[4911]: I0606 09:59:14.248064 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-2ln4f" Jun 06 09:59:14 crc kubenswrapper[4911]: I0606 09:59:14.276364 4911 scope.go:117] "RemoveContainer" containerID="56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac" Jun 06 09:59:14 crc kubenswrapper[4911]: E0606 09:59:14.276922 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac\": container with ID starting with 56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac not found: ID does not exist" containerID="56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac" Jun 06 09:59:14 crc kubenswrapper[4911]: I0606 09:59:14.276976 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac"} err="failed to get container status \"56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac\": rpc error: code = NotFound desc = could not find container \"56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac\": container with ID starting with 56ac18b90d621f72bde9029e5918de5bb803c9f3bcc1f9443a88e8784d9c25ac not found: ID does not exist" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.142400 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72"] Jun 06 10:00:00 crc kubenswrapper[4911]: E0606 10:00:00.143575 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40bacced-fd62-47b4-8db0-16eda3506911" containerName="container-00" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.143595 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="40bacced-fd62-47b4-8db0-16eda3506911" containerName="container-00" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.143872 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="40bacced-fd62-47b4-8db0-16eda3506911" containerName="container-00" Jun 06 10:00:00 crc kubenswrapper[4911]: 
I0606 10:00:00.144711 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.146528 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.147185 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.153795 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72"] Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.308234 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b46d4c1-85c3-4862-bd90-0a14073266a6-secret-volume\") pod \"collect-profiles-29153400-j9h72\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.308280 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b46d4c1-85c3-4862-bd90-0a14073266a6-config-volume\") pod \"collect-profiles-29153400-j9h72\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.308464 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9v9ff\" (UniqueName: \"kubernetes.io/projected/0b46d4c1-85c3-4862-bd90-0a14073266a6-kube-api-access-9v9ff\") pod \"collect-profiles-29153400-j9h72\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.411259 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b46d4c1-85c3-4862-bd90-0a14073266a6-secret-volume\") pod \"collect-profiles-29153400-j9h72\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.411306 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b46d4c1-85c3-4862-bd90-0a14073266a6-config-volume\") pod \"collect-profiles-29153400-j9h72\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.411358 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9v9ff\" (UniqueName: \"kubernetes.io/projected/0b46d4c1-85c3-4862-bd90-0a14073266a6-kube-api-access-9v9ff\") pod \"collect-profiles-29153400-j9h72\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.412553 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/0b46d4c1-85c3-4862-bd90-0a14073266a6-config-volume\") pod \"collect-profiles-29153400-j9h72\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.417451 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b46d4c1-85c3-4862-bd90-0a14073266a6-secret-volume\") pod \"collect-profiles-29153400-j9h72\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.428163 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9v9ff\" (UniqueName: \"kubernetes.io/projected/0b46d4c1-85c3-4862-bd90-0a14073266a6-kube-api-access-9v9ff\") pod \"collect-profiles-29153400-j9h72\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:00 crc kubenswrapper[4911]: I0606 10:00:00.482593 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:01 crc kubenswrapper[4911]: I0606 10:00:01.154174 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72"] Jun 06 10:00:01 crc kubenswrapper[4911]: I0606 10:00:01.696548 4911 generic.go:334] "Generic (PLEG): container finished" podID="0b46d4c1-85c3-4862-bd90-0a14073266a6" containerID="f9a1ef92d178a3c139f04916e144d1f0864d5ecf7ffa0c7e45a5a24e4765232d" exitCode=0 Jun 06 10:00:01 crc kubenswrapper[4911]: I0606 10:00:01.696730 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" event={"ID":"0b46d4c1-85c3-4862-bd90-0a14073266a6","Type":"ContainerDied","Data":"f9a1ef92d178a3c139f04916e144d1f0864d5ecf7ffa0c7e45a5a24e4765232d"} Jun 06 10:00:01 crc kubenswrapper[4911]: I0606 10:00:01.696912 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" event={"ID":"0b46d4c1-85c3-4862-bd90-0a14073266a6","Type":"ContainerStarted","Data":"a987c543e8617145010729dbde5c09e1b95bd76a2aa1811c98c20fc8421209cc"} Jun 06 10:00:01 crc kubenswrapper[4911]: I0606 10:00:01.763305 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-vhl2m"] Jun 06 10:00:01 crc kubenswrapper[4911]: I0606 10:00:01.765046 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-vhl2m" Jun 06 10:00:01 crc kubenswrapper[4911]: I0606 10:00:01.941614 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-host\") pod \"crc-debug-vhl2m\" (UID: \"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c\") " pod="openstack/crc-debug-vhl2m" Jun 06 10:00:01 crc kubenswrapper[4911]: I0606 10:00:01.941984 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4wml\" (UniqueName: \"kubernetes.io/projected/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-kube-api-access-b4wml\") pod \"crc-debug-vhl2m\" (UID: \"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c\") " pod="openstack/crc-debug-vhl2m" Jun 06 10:00:02 crc kubenswrapper[4911]: I0606 10:00:02.044418 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-host\") pod \"crc-debug-vhl2m\" (UID: \"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c\") " pod="openstack/crc-debug-vhl2m" Jun 06 10:00:02 crc kubenswrapper[4911]: I0606 10:00:02.044611 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-host\") pod \"crc-debug-vhl2m\" (UID: \"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c\") " pod="openstack/crc-debug-vhl2m" Jun 06 10:00:02 crc kubenswrapper[4911]: I0606 10:00:02.045559 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4wml\" (UniqueName: \"kubernetes.io/projected/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-kube-api-access-b4wml\") pod \"crc-debug-vhl2m\" (UID: \"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c\") " pod="openstack/crc-debug-vhl2m" Jun 06 10:00:02 crc kubenswrapper[4911]: I0606 10:00:02.068932 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4wml\" (UniqueName: \"kubernetes.io/projected/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-kube-api-access-b4wml\") pod \"crc-debug-vhl2m\" (UID: \"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c\") " pod="openstack/crc-debug-vhl2m" Jun 06 10:00:02 crc kubenswrapper[4911]: I0606 10:00:02.091406 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-vhl2m" Jun 06 10:00:02 crc kubenswrapper[4911]: I0606 10:00:02.706827 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-vhl2m" event={"ID":"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c","Type":"ContainerStarted","Data":"5e38ccb390c0b66432d0f72072a6500dee02771fd6f64c63b79597c4218a0231"} Jun 06 10:00:02 crc kubenswrapper[4911]: I0606 10:00:02.708626 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-vhl2m" event={"ID":"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c","Type":"ContainerStarted","Data":"9b9762c97d77eaf44acb4db3530ef57ae7158a94fc961ee485aeb063ca23218e"} Jun 06 10:00:02 crc kubenswrapper[4911]: I0606 10:00:02.730832 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-vhl2m" podStartSLOduration=1.7308105550000001 podStartE2EDuration="1.730810555s" podCreationTimestamp="2025-06-06 10:00:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:00:02.721407321 +0000 UTC m=+2813.996832884" watchObservedRunningTime="2025-06-06 10:00:02.730810555 +0000 UTC m=+2814.006236098" Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.718156 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" event={"ID":"0b46d4c1-85c3-4862-bd90-0a14073266a6","Type":"ContainerDied","Data":"a987c543e8617145010729dbde5c09e1b95bd76a2aa1811c98c20fc8421209cc"} Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.718809 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a987c543e8617145010729dbde5c09e1b95bd76a2aa1811c98c20fc8421209cc" Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.766515 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.883433 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b46d4c1-85c3-4862-bd90-0a14073266a6-config-volume\") pod \"0b46d4c1-85c3-4862-bd90-0a14073266a6\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.883950 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9v9ff\" (UniqueName: \"kubernetes.io/projected/0b46d4c1-85c3-4862-bd90-0a14073266a6-kube-api-access-9v9ff\") pod \"0b46d4c1-85c3-4862-bd90-0a14073266a6\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.884043 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b46d4c1-85c3-4862-bd90-0a14073266a6-secret-volume\") pod \"0b46d4c1-85c3-4862-bd90-0a14073266a6\" (UID: \"0b46d4c1-85c3-4862-bd90-0a14073266a6\") " Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.884288 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b46d4c1-85c3-4862-bd90-0a14073266a6-config-volume" (OuterVolumeSpecName: "config-volume") pod "0b46d4c1-85c3-4862-bd90-0a14073266a6" (UID: "0b46d4c1-85c3-4862-bd90-0a14073266a6"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.884570 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b46d4c1-85c3-4862-bd90-0a14073266a6-config-volume\") on node \"crc\" DevicePath \"\"" Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.890544 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b46d4c1-85c3-4862-bd90-0a14073266a6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0b46d4c1-85c3-4862-bd90-0a14073266a6" (UID: "0b46d4c1-85c3-4862-bd90-0a14073266a6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.891909 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b46d4c1-85c3-4862-bd90-0a14073266a6-kube-api-access-9v9ff" (OuterVolumeSpecName: "kube-api-access-9v9ff") pod "0b46d4c1-85c3-4862-bd90-0a14073266a6" (UID: "0b46d4c1-85c3-4862-bd90-0a14073266a6"). InnerVolumeSpecName "kube-api-access-9v9ff". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.986725 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9v9ff\" (UniqueName: \"kubernetes.io/projected/0b46d4c1-85c3-4862-bd90-0a14073266a6-kube-api-access-9v9ff\") on node \"crc\" DevicePath \"\"" Jun 06 10:00:03 crc kubenswrapper[4911]: I0606 10:00:03.986773 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b46d4c1-85c3-4862-bd90-0a14073266a6-secret-volume\") on node \"crc\" DevicePath \"\"" Jun 06 10:00:04 crc kubenswrapper[4911]: I0606 10:00:04.727349 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72" Jun 06 10:00:04 crc kubenswrapper[4911]: I0606 10:00:04.837284 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn"] Jun 06 10:00:04 crc kubenswrapper[4911]: I0606 10:00:04.845311 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153355-cvwzn"] Jun 06 10:00:05 crc kubenswrapper[4911]: I0606 10:00:05.958342 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016" path="/var/lib/kubelet/pods/4e5a954a-6363-4a2a-b6bd-7f6ad9ec9016/volumes" Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.752587 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-vhl2m"] Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.753525 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-vhl2m" podUID="f16dd22e-d9bc-4ced-bb96-c5617c40cc6c" containerName="container-00" containerID="cri-o://5e38ccb390c0b66432d0f72072a6500dee02771fd6f64c63b79597c4218a0231" gracePeriod=2 Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.762917 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-vhl2m"] Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.802828 4911 generic.go:334] "Generic (PLEG): container finished" podID="f16dd22e-d9bc-4ced-bb96-c5617c40cc6c" containerID="5e38ccb390c0b66432d0f72072a6500dee02771fd6f64c63b79597c4218a0231" exitCode=0 Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.802897 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b9762c97d77eaf44acb4db3530ef57ae7158a94fc961ee485aeb063ca23218e" Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.860708 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-vhl2m" Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.886446 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4wml\" (UniqueName: \"kubernetes.io/projected/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-kube-api-access-b4wml\") pod \"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c\" (UID: \"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c\") " Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.886784 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-host\") pod \"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c\" (UID: \"f16dd22e-d9bc-4ced-bb96-c5617c40cc6c\") " Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.886938 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-host" (OuterVolumeSpecName: "host") pod "f16dd22e-d9bc-4ced-bb96-c5617c40cc6c" (UID: "f16dd22e-d9bc-4ced-bb96-c5617c40cc6c"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.887231 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.893247 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-kube-api-access-b4wml" (OuterVolumeSpecName: "kube-api-access-b4wml") pod "f16dd22e-d9bc-4ced-bb96-c5617c40cc6c" (UID: "f16dd22e-d9bc-4ced-bb96-c5617c40cc6c"). InnerVolumeSpecName "kube-api-access-b4wml". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:00:12 crc kubenswrapper[4911]: I0606 10:00:12.989361 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4wml\" (UniqueName: \"kubernetes.io/projected/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c-kube-api-access-b4wml\") on node \"crc\" DevicePath \"\"" Jun 06 10:00:13 crc kubenswrapper[4911]: I0606 10:00:13.811134 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-vhl2m" Jun 06 10:00:13 crc kubenswrapper[4911]: I0606 10:00:13.959846 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f16dd22e-d9bc-4ced-bb96-c5617c40cc6c" path="/var/lib/kubelet/pods/f16dd22e-d9bc-4ced-bb96-c5617c40cc6c/volumes" Jun 06 10:00:21 crc kubenswrapper[4911]: I0606 10:00:21.138936 4911 scope.go:117] "RemoveContainer" containerID="4e96101733ef9f53afcc5767ae8295ecd736ff7985e2ffe83b7e612a4f0ea6e4" Jun 06 10:00:24 crc kubenswrapper[4911]: I0606 10:00:24.301440 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:00:24 crc kubenswrapper[4911]: I0606 10:00:24.302076 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:00:54 crc kubenswrapper[4911]: I0606 10:00:54.300581 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:00:54 crc kubenswrapper[4911]: I0606 10:00:54.301186 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.155455 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29153401-s7jg4"] Jun 06 10:01:00 crc kubenswrapper[4911]: E0606 10:01:00.156537 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16dd22e-d9bc-4ced-bb96-c5617c40cc6c" containerName="container-00" Jun 06 10:01:00 crc kubenswrapper[4911]: 
I0606 10:01:00.156553 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16dd22e-d9bc-4ced-bb96-c5617c40cc6c" containerName="container-00" Jun 06 10:01:00 crc kubenswrapper[4911]: E0606 10:01:00.156566 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b46d4c1-85c3-4862-bd90-0a14073266a6" containerName="collect-profiles" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.156574 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b46d4c1-85c3-4862-bd90-0a14073266a6" containerName="collect-profiles" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.156765 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16dd22e-d9bc-4ced-bb96-c5617c40cc6c" containerName="container-00" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.156790 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b46d4c1-85c3-4862-bd90-0a14073266a6" containerName="collect-profiles" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.157711 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.181879 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29153401-s7jg4"] Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.254207 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg7wm\" (UniqueName: \"kubernetes.io/projected/92c84921-c3c0-43d5-b87c-813ffe3fa478-kube-api-access-kg7wm\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.254284 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-fernet-keys\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.254359 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-config-data\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.254402 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-combined-ca-bundle\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.356348 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-fernet-keys\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.356753 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-config-data\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.356902 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-combined-ca-bundle\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.357173 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg7wm\" (UniqueName: \"kubernetes.io/projected/92c84921-c3c0-43d5-b87c-813ffe3fa478-kube-api-access-kg7wm\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.364154 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-combined-ca-bundle\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.364219 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-config-data\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.372485 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-fernet-keys\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.376676 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg7wm\" (UniqueName: \"kubernetes.io/projected/92c84921-c3c0-43d5-b87c-813ffe3fa478-kube-api-access-kg7wm\") pod \"keystone-cron-29153401-s7jg4\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:00 crc kubenswrapper[4911]: I0606 10:01:00.490460 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:01 crc kubenswrapper[4911]: I0606 10:01:01.206373 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29153401-s7jg4"] Jun 06 10:01:01 crc kubenswrapper[4911]: I0606 10:01:01.253171 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29153401-s7jg4" event={"ID":"92c84921-c3c0-43d5-b87c-813ffe3fa478","Type":"ContainerStarted","Data":"8d107bd8ffe34cae571b5020ff8fd5c116a8b1bd682107251eee27a170cf2ac2"} Jun 06 10:01:02 crc kubenswrapper[4911]: I0606 10:01:02.165130 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-ftrzb"] Jun 06 10:01:02 crc kubenswrapper[4911]: I0606 10:01:02.166991 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-ftrzb" Jun 06 10:01:02 crc kubenswrapper[4911]: I0606 10:01:02.199365 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-247wx\" (UniqueName: \"kubernetes.io/projected/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-kube-api-access-247wx\") pod \"crc-debug-ftrzb\" (UID: \"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a\") " pod="openstack/crc-debug-ftrzb" Jun 06 10:01:02 crc kubenswrapper[4911]: I0606 10:01:02.199511 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-host\") pod \"crc-debug-ftrzb\" (UID: \"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a\") " pod="openstack/crc-debug-ftrzb" Jun 06 10:01:02 crc kubenswrapper[4911]: I0606 10:01:02.280175 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29153401-s7jg4" event={"ID":"92c84921-c3c0-43d5-b87c-813ffe3fa478","Type":"ContainerStarted","Data":"7dd0f2780ec59b7fe37c9e2b3ed59448ad0f86cc47725556e6247cdec7935120"} Jun 06 10:01:02 crc kubenswrapper[4911]: I0606 10:01:02.302081 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-247wx\" (UniqueName: \"kubernetes.io/projected/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-kube-api-access-247wx\") pod \"crc-debug-ftrzb\" (UID: \"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a\") " pod="openstack/crc-debug-ftrzb" Jun 06 10:01:02 crc kubenswrapper[4911]: I0606 10:01:02.302304 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-host\") pod \"crc-debug-ftrzb\" (UID: \"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a\") " pod="openstack/crc-debug-ftrzb" Jun 06 10:01:02 crc kubenswrapper[4911]: I0606 10:01:02.302571 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-host\") pod \"crc-debug-ftrzb\" (UID: \"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a\") " pod="openstack/crc-debug-ftrzb" Jun 06 10:01:02 crc kubenswrapper[4911]: I0606 10:01:02.309161 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29153401-s7jg4" podStartSLOduration=2.30913901 podStartE2EDuration="2.30913901s" podCreationTimestamp="2025-06-06 10:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:01:02.305833835 +0000 UTC m=+2873.581259378" watchObservedRunningTime="2025-06-06 10:01:02.30913901 +0000 UTC m=+2873.584564553" Jun 06 10:01:02 crc kubenswrapper[4911]: I0606 10:01:02.342341 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-247wx\" (UniqueName: \"kubernetes.io/projected/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-kube-api-access-247wx\") pod \"crc-debug-ftrzb\" (UID: \"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a\") " pod="openstack/crc-debug-ftrzb" Jun 06 10:01:02 crc kubenswrapper[4911]: I0606 10:01:02.491454 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-ftrzb" Jun 06 10:01:03 crc kubenswrapper[4911]: I0606 10:01:03.290931 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-ftrzb" event={"ID":"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a","Type":"ContainerStarted","Data":"e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d"} Jun 06 10:01:03 crc kubenswrapper[4911]: I0606 10:01:03.291337 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-ftrzb" event={"ID":"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a","Type":"ContainerStarted","Data":"68d6382c8e862f43a598b774d8758b0622e3c1535a2c8ab8cce546576a6a2085"} Jun 06 10:01:03 crc kubenswrapper[4911]: I0606 10:01:03.311951 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-ftrzb" podStartSLOduration=1.311927593 podStartE2EDuration="1.311927593s" podCreationTimestamp="2025-06-06 10:01:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:01:03.303882784 +0000 UTC m=+2874.579308347" watchObservedRunningTime="2025-06-06 10:01:03.311927593 +0000 UTC m=+2874.587353136" Jun 06 10:01:04 crc kubenswrapper[4911]: I0606 10:01:04.302543 4911 generic.go:334] "Generic (PLEG): container finished" podID="92c84921-c3c0-43d5-b87c-813ffe3fa478" containerID="7dd0f2780ec59b7fe37c9e2b3ed59448ad0f86cc47725556e6247cdec7935120" exitCode=0 Jun 06 10:01:04 crc kubenswrapper[4911]: I0606 10:01:04.302652 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29153401-s7jg4" event={"ID":"92c84921-c3c0-43d5-b87c-813ffe3fa478","Type":"ContainerDied","Data":"7dd0f2780ec59b7fe37c9e2b3ed59448ad0f86cc47725556e6247cdec7935120"} Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.337047 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29153401-s7jg4" event={"ID":"92c84921-c3c0-43d5-b87c-813ffe3fa478","Type":"ContainerDied","Data":"8d107bd8ffe34cae571b5020ff8fd5c116a8b1bd682107251eee27a170cf2ac2"} Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.337730 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d107bd8ffe34cae571b5020ff8fd5c116a8b1bd682107251eee27a170cf2ac2" Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.435698 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.496751 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-combined-ca-bundle\") pod \"92c84921-c3c0-43d5-b87c-813ffe3fa478\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.496825 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg7wm\" (UniqueName: \"kubernetes.io/projected/92c84921-c3c0-43d5-b87c-813ffe3fa478-kube-api-access-kg7wm\") pod \"92c84921-c3c0-43d5-b87c-813ffe3fa478\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.496859 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-config-data\") pod \"92c84921-c3c0-43d5-b87c-813ffe3fa478\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.496987 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-fernet-keys\") pod \"92c84921-c3c0-43d5-b87c-813ffe3fa478\" (UID: \"92c84921-c3c0-43d5-b87c-813ffe3fa478\") " Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.502991 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92c84921-c3c0-43d5-b87c-813ffe3fa478-kube-api-access-kg7wm" (OuterVolumeSpecName: "kube-api-access-kg7wm") pod "92c84921-c3c0-43d5-b87c-813ffe3fa478" (UID: "92c84921-c3c0-43d5-b87c-813ffe3fa478"). InnerVolumeSpecName "kube-api-access-kg7wm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.506354 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "92c84921-c3c0-43d5-b87c-813ffe3fa478" (UID: "92c84921-c3c0-43d5-b87c-813ffe3fa478"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.544552 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92c84921-c3c0-43d5-b87c-813ffe3fa478" (UID: "92c84921-c3c0-43d5-b87c-813ffe3fa478"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.567550 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-config-data" (OuterVolumeSpecName: "config-data") pod "92c84921-c3c0-43d5-b87c-813ffe3fa478" (UID: "92c84921-c3c0-43d5-b87c-813ffe3fa478"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.599690 4911 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-fernet-keys\") on node \"crc\" DevicePath \"\"" Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.599743 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.599758 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg7wm\" (UniqueName: \"kubernetes.io/projected/92c84921-c3c0-43d5-b87c-813ffe3fa478-kube-api-access-kg7wm\") on node \"crc\" DevicePath \"\"" Jun 06 10:01:06 crc kubenswrapper[4911]: I0606 10:01:06.599771 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c84921-c3c0-43d5-b87c-813ffe3fa478-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 10:01:07 crc kubenswrapper[4911]: I0606 10:01:07.344563 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29153401-s7jg4" Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.186636 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-ftrzb"] Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.187596 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-ftrzb" podUID="f10dc1ba-3c6c-4fdd-92e3-ab284cda651a" containerName="container-00" containerID="cri-o://e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d" gracePeriod=2 Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.199074 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-ftrzb"] Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.289616 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-ftrzb" Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.408822 4911 generic.go:334] "Generic (PLEG): container finished" podID="f10dc1ba-3c6c-4fdd-92e3-ab284cda651a" containerID="e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d" exitCode=0 Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.408913 4911 scope.go:117] "RemoveContainer" containerID="e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d" Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.409623 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-ftrzb" Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.428966 4911 scope.go:117] "RemoveContainer" containerID="e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d" Jun 06 10:01:13 crc kubenswrapper[4911]: E0606 10:01:13.429824 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d\": container with ID starting with e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d not found: ID does not exist" containerID="e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d" Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.429898 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d"} err="failed to get container status \"e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d\": rpc error: code = NotFound desc = could not find container \"e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d\": container with ID starting with e585b24e6d03484c775741d40e50a8bbc1ae183881a88b6ee0486eb7f120a09d not found: ID does not exist" Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.464964 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-host\") pod \"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a\" (UID: \"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a\") " Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.465042 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-247wx\" (UniqueName: \"kubernetes.io/projected/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-kube-api-access-247wx\") pod \"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a\" (UID: \"f10dc1ba-3c6c-4fdd-92e3-ab284cda651a\") " Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.465129 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-host" (OuterVolumeSpecName: "host") pod "f10dc1ba-3c6c-4fdd-92e3-ab284cda651a" (UID: "f10dc1ba-3c6c-4fdd-92e3-ab284cda651a"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.465658 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.471050 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-kube-api-access-247wx" (OuterVolumeSpecName: "kube-api-access-247wx") pod "f10dc1ba-3c6c-4fdd-92e3-ab284cda651a" (UID: "f10dc1ba-3c6c-4fdd-92e3-ab284cda651a"). InnerVolumeSpecName "kube-api-access-247wx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.569331 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-247wx\" (UniqueName: \"kubernetes.io/projected/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a-kube-api-access-247wx\") on node \"crc\" DevicePath \"\"" Jun 06 10:01:13 crc kubenswrapper[4911]: I0606 10:01:13.960494 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f10dc1ba-3c6c-4fdd-92e3-ab284cda651a" path="/var/lib/kubelet/pods/f10dc1ba-3c6c-4fdd-92e3-ab284cda651a/volumes" Jun 06 10:01:24 crc kubenswrapper[4911]: I0606 10:01:24.300190 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:01:24 crc kubenswrapper[4911]: I0606 10:01:24.300736 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:01:24 crc kubenswrapper[4911]: I0606 10:01:24.300800 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 10:01:24 crc kubenswrapper[4911]: I0606 10:01:24.301683 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 10:01:24 crc kubenswrapper[4911]: I0606 10:01:24.301750 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" gracePeriod=600 Jun 06 10:01:24 crc kubenswrapper[4911]: E0606 10:01:24.428856 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:01:24 crc kubenswrapper[4911]: I0606 10:01:24.507767 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" exitCode=0 Jun 06 10:01:24 crc kubenswrapper[4911]: I0606 10:01:24.507812 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac"} Jun 06 10:01:24 crc kubenswrapper[4911]: I0606 10:01:24.508170 4911 scope.go:117] "RemoveContainer" 
containerID="a26b21cd2dacce99add5f3271857622702e798058ab60957fc172d6f9b03523a" Jun 06 10:01:24 crc kubenswrapper[4911]: I0606 10:01:24.509253 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:01:24 crc kubenswrapper[4911]: E0606 10:01:24.509906 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:01:37 crc kubenswrapper[4911]: I0606 10:01:37.948279 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:01:37 crc kubenswrapper[4911]: E0606 10:01:37.949079 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:01:50 crc kubenswrapper[4911]: I0606 10:01:50.948581 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:01:50 crc kubenswrapper[4911]: E0606 10:01:50.949558 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.561536 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-2lsjv"] Jun 06 10:02:01 crc kubenswrapper[4911]: E0606 10:02:01.563260 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f10dc1ba-3c6c-4fdd-92e3-ab284cda651a" containerName="container-00" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.563283 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f10dc1ba-3c6c-4fdd-92e3-ab284cda651a" containerName="container-00" Jun 06 10:02:01 crc kubenswrapper[4911]: E0606 10:02:01.563319 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92c84921-c3c0-43d5-b87c-813ffe3fa478" containerName="keystone-cron" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.563333 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="92c84921-c3c0-43d5-b87c-813ffe3fa478" containerName="keystone-cron" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.565134 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="92c84921-c3c0-43d5-b87c-813ffe3fa478" containerName="keystone-cron" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.565247 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f10dc1ba-3c6c-4fdd-92e3-ab284cda651a" containerName="container-00" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.566586 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2lsjv" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.682914 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-host\") pod \"crc-debug-2lsjv\" (UID: \"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c\") " pod="openstack/crc-debug-2lsjv" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.683016 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-549t9\" (UniqueName: \"kubernetes.io/projected/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-kube-api-access-549t9\") pod \"crc-debug-2lsjv\" (UID: \"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c\") " pod="openstack/crc-debug-2lsjv" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.785064 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-549t9\" (UniqueName: \"kubernetes.io/projected/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-kube-api-access-549t9\") pod \"crc-debug-2lsjv\" (UID: \"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c\") " pod="openstack/crc-debug-2lsjv" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.785260 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-host\") pod \"crc-debug-2lsjv\" (UID: \"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c\") " pod="openstack/crc-debug-2lsjv" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.785385 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-host\") pod \"crc-debug-2lsjv\" (UID: \"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c\") " pod="openstack/crc-debug-2lsjv" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.804847 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-549t9\" (UniqueName: \"kubernetes.io/projected/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-kube-api-access-549t9\") pod \"crc-debug-2lsjv\" (UID: \"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c\") " pod="openstack/crc-debug-2lsjv" Jun 06 10:02:01 crc kubenswrapper[4911]: I0606 10:02:01.889743 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2lsjv" Jun 06 10:02:02 crc kubenswrapper[4911]: I0606 10:02:02.894859 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-2lsjv" event={"ID":"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c","Type":"ContainerStarted","Data":"b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9"} Jun 06 10:02:02 crc kubenswrapper[4911]: I0606 10:02:02.895384 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-2lsjv" event={"ID":"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c","Type":"ContainerStarted","Data":"7085fc63af8c13ee8c449ffeb5f812ebcc6fdf5354d78d7ebaba56c6b3587163"} Jun 06 10:02:02 crc kubenswrapper[4911]: I0606 10:02:02.909491 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-2lsjv" podStartSLOduration=1.909474377 podStartE2EDuration="1.909474377s" podCreationTimestamp="2025-06-06 10:02:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:02:02.90882681 +0000 UTC m=+2934.184252353" watchObservedRunningTime="2025-06-06 10:02:02.909474377 +0000 UTC m=+2934.184899920" Jun 06 10:02:04 crc kubenswrapper[4911]: I0606 10:02:04.947896 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:02:04 crc kubenswrapper[4911]: E0606 10:02:04.948564 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.507707 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-2lsjv"] Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.509996 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-2lsjv" podUID="9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c" containerName="container-00" containerID="cri-o://b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9" gracePeriod=2 Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.516772 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-2lsjv"] Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.615419 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2lsjv" Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.711915 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-549t9\" (UniqueName: \"kubernetes.io/projected/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-kube-api-access-549t9\") pod \"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c\" (UID: \"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c\") " Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.711984 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-host\") pod \"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c\" (UID: \"9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c\") " Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.712134 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-host" (OuterVolumeSpecName: "host") pod "9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c" (UID: "9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.712825 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.717497 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-kube-api-access-549t9" (OuterVolumeSpecName: "kube-api-access-549t9") pod "9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c" (UID: "9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c"). InnerVolumeSpecName "kube-api-access-549t9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.815664 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-549t9\" (UniqueName: \"kubernetes.io/projected/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c-kube-api-access-549t9\") on node \"crc\" DevicePath \"\"" Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.993229 4911 generic.go:334] "Generic (PLEG): container finished" podID="9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c" containerID="b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9" exitCode=0 Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.993274 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2lsjv" Jun 06 10:02:12 crc kubenswrapper[4911]: I0606 10:02:12.993294 4911 scope.go:117] "RemoveContainer" containerID="b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9" Jun 06 10:02:13 crc kubenswrapper[4911]: I0606 10:02:13.021870 4911 scope.go:117] "RemoveContainer" containerID="b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9" Jun 06 10:02:13 crc kubenswrapper[4911]: E0606 10:02:13.022512 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9\": container with ID starting with b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9 not found: ID does not exist" containerID="b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9" Jun 06 10:02:13 crc kubenswrapper[4911]: I0606 10:02:13.022558 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9"} err="failed to get container status \"b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9\": rpc error: code = NotFound desc = could not find container \"b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9\": container with ID starting with b2040b9a2cabee56c1f261f5423b9ab9cef19a26af103f36cd0990ad4d0369d9 not found: ID does not exist" Jun 06 10:02:13 crc kubenswrapper[4911]: I0606 10:02:13.959594 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c" path="/var/lib/kubelet/pods/9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c/volumes" Jun 06 10:02:18 crc kubenswrapper[4911]: I0606 10:02:18.951357 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:02:18 crc kubenswrapper[4911]: E0606 10:02:18.952487 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:02:29 crc kubenswrapper[4911]: I0606 10:02:29.955252 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:02:29 crc kubenswrapper[4911]: E0606 10:02:29.956170 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:02:44 crc kubenswrapper[4911]: I0606 10:02:44.948262 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:02:44 crc kubenswrapper[4911]: E0606 10:02:44.948928 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:02:56 crc kubenswrapper[4911]: I0606 10:02:56.948496 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:02:56 crc kubenswrapper[4911]: E0606 10:02:56.949230 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:03:01 crc kubenswrapper[4911]: I0606 10:03:01.959336 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-vzfzh"] Jun 06 10:03:01 crc kubenswrapper[4911]: E0606 10:03:01.960137 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c" containerName="container-00" Jun 06 10:03:01 crc kubenswrapper[4911]: I0606 10:03:01.960150 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c" containerName="container-00" Jun 06 10:03:01 crc kubenswrapper[4911]: I0606 10:03:01.960355 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f0c60c6-4027-4ddf-9cf7-00f4e6fd220c" containerName="container-00" Jun 06 10:03:01 crc kubenswrapper[4911]: I0606 10:03:01.961053 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-vzfzh" Jun 06 10:03:02 crc kubenswrapper[4911]: I0606 10:03:02.010329 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-host\") pod \"crc-debug-vzfzh\" (UID: \"2a79f385-7b2d-406c-a977-b24fd8dd1b1e\") " pod="openstack/crc-debug-vzfzh" Jun 06 10:03:02 crc kubenswrapper[4911]: I0606 10:03:02.010909 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4w4l\" (UniqueName: \"kubernetes.io/projected/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-kube-api-access-j4w4l\") pod \"crc-debug-vzfzh\" (UID: \"2a79f385-7b2d-406c-a977-b24fd8dd1b1e\") " pod="openstack/crc-debug-vzfzh" Jun 06 10:03:02 crc kubenswrapper[4911]: I0606 10:03:02.112778 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-host\") pod \"crc-debug-vzfzh\" (UID: \"2a79f385-7b2d-406c-a977-b24fd8dd1b1e\") " pod="openstack/crc-debug-vzfzh" Jun 06 10:03:02 crc kubenswrapper[4911]: I0606 10:03:02.112988 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4w4l\" (UniqueName: \"kubernetes.io/projected/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-kube-api-access-j4w4l\") pod \"crc-debug-vzfzh\" (UID: \"2a79f385-7b2d-406c-a977-b24fd8dd1b1e\") " pod="openstack/crc-debug-vzfzh" Jun 06 10:03:02 crc kubenswrapper[4911]: I0606 10:03:02.112997 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-host\") pod \"crc-debug-vzfzh\" (UID: 
\"2a79f385-7b2d-406c-a977-b24fd8dd1b1e\") " pod="openstack/crc-debug-vzfzh" Jun 06 10:03:02 crc kubenswrapper[4911]: I0606 10:03:02.135215 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4w4l\" (UniqueName: \"kubernetes.io/projected/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-kube-api-access-j4w4l\") pod \"crc-debug-vzfzh\" (UID: \"2a79f385-7b2d-406c-a977-b24fd8dd1b1e\") " pod="openstack/crc-debug-vzfzh" Jun 06 10:03:02 crc kubenswrapper[4911]: I0606 10:03:02.281237 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-vzfzh" Jun 06 10:03:02 crc kubenswrapper[4911]: I0606 10:03:02.446776 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-vzfzh" event={"ID":"2a79f385-7b2d-406c-a977-b24fd8dd1b1e","Type":"ContainerStarted","Data":"28024a4bb54e8890a69b1a3a9b554db6df3787b6178b1d2753c1a5344bc30fc3"} Jun 06 10:03:03 crc kubenswrapper[4911]: I0606 10:03:03.457172 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-vzfzh" event={"ID":"2a79f385-7b2d-406c-a977-b24fd8dd1b1e","Type":"ContainerStarted","Data":"f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae"} Jun 06 10:03:03 crc kubenswrapper[4911]: I0606 10:03:03.479222 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-vzfzh" podStartSLOduration=2.479196736 podStartE2EDuration="2.479196736s" podCreationTimestamp="2025-06-06 10:03:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:03:03.468778625 +0000 UTC m=+2994.744204178" watchObservedRunningTime="2025-06-06 10:03:03.479196736 +0000 UTC m=+2994.754622269" Jun 06 10:03:11 crc kubenswrapper[4911]: I0606 10:03:11.947808 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:03:11 crc kubenswrapper[4911]: E0606 10:03:11.948742 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:03:12 crc kubenswrapper[4911]: I0606 10:03:12.871713 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-vzfzh"] Jun 06 10:03:12 crc kubenswrapper[4911]: I0606 10:03:12.872069 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-vzfzh" podUID="2a79f385-7b2d-406c-a977-b24fd8dd1b1e" containerName="container-00" containerID="cri-o://f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae" gracePeriod=2 Jun 06 10:03:12 crc kubenswrapper[4911]: I0606 10:03:12.882421 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-vzfzh"] Jun 06 10:03:12 crc kubenswrapper[4911]: I0606 10:03:12.972240 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-vzfzh" Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.042643 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-host\") pod \"2a79f385-7b2d-406c-a977-b24fd8dd1b1e\" (UID: \"2a79f385-7b2d-406c-a977-b24fd8dd1b1e\") " Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.043030 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4w4l\" (UniqueName: \"kubernetes.io/projected/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-kube-api-access-j4w4l\") pod \"2a79f385-7b2d-406c-a977-b24fd8dd1b1e\" (UID: \"2a79f385-7b2d-406c-a977-b24fd8dd1b1e\") " Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.044320 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-host" (OuterVolumeSpecName: "host") pod "2a79f385-7b2d-406c-a977-b24fd8dd1b1e" (UID: "2a79f385-7b2d-406c-a977-b24fd8dd1b1e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.049472 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-kube-api-access-j4w4l" (OuterVolumeSpecName: "kube-api-access-j4w4l") pod "2a79f385-7b2d-406c-a977-b24fd8dd1b1e" (UID: "2a79f385-7b2d-406c-a977-b24fd8dd1b1e"). InnerVolumeSpecName "kube-api-access-j4w4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.144515 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.144552 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4w4l\" (UniqueName: \"kubernetes.io/projected/2a79f385-7b2d-406c-a977-b24fd8dd1b1e-kube-api-access-j4w4l\") on node \"crc\" DevicePath \"\"" Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.538375 4911 generic.go:334] "Generic (PLEG): container finished" podID="2a79f385-7b2d-406c-a977-b24fd8dd1b1e" containerID="f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae" exitCode=0 Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.538426 4911 scope.go:117] "RemoveContainer" containerID="f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae" Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.538551 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-vzfzh" Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.566476 4911 scope.go:117] "RemoveContainer" containerID="f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae" Jun 06 10:03:13 crc kubenswrapper[4911]: E0606 10:03:13.567067 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae\": container with ID starting with f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae not found: ID does not exist" containerID="f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae" Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.567330 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae"} err="failed to get container status \"f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae\": rpc error: code = NotFound desc = could not find container \"f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae\": container with ID starting with f0f15e1b2cf77a6b7dfc6142dcead0431474a0ef68eb6efa06d9b258164d41ae not found: ID does not exist" Jun 06 10:03:13 crc kubenswrapper[4911]: I0606 10:03:13.960519 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a79f385-7b2d-406c-a977-b24fd8dd1b1e" path="/var/lib/kubelet/pods/2a79f385-7b2d-406c-a977-b24fd8dd1b1e/volumes" Jun 06 10:03:26 crc kubenswrapper[4911]: I0606 10:03:26.948064 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:03:26 crc kubenswrapper[4911]: E0606 10:03:26.948998 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:03:38 crc kubenswrapper[4911]: I0606 10:03:38.948215 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:03:38 crc kubenswrapper[4911]: E0606 10:03:38.949005 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:03:53 crc kubenswrapper[4911]: I0606 10:03:53.947785 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:03:53 crc kubenswrapper[4911]: E0606 10:03:53.948592 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" 
podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.232172 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-dc2d4"] Jun 06 10:04:02 crc kubenswrapper[4911]: E0606 10:04:02.233048 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a79f385-7b2d-406c-a977-b24fd8dd1b1e" containerName="container-00" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.233060 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a79f385-7b2d-406c-a977-b24fd8dd1b1e" containerName="container-00" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.233544 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a79f385-7b2d-406c-a977-b24fd8dd1b1e" containerName="container-00" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.234162 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-dc2d4" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.343697 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7cc54265-5646-4d34-bf5f-b197b9c818f5-host\") pod \"crc-debug-dc2d4\" (UID: \"7cc54265-5646-4d34-bf5f-b197b9c818f5\") " pod="openstack/crc-debug-dc2d4" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.344186 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rlcs\" (UniqueName: \"kubernetes.io/projected/7cc54265-5646-4d34-bf5f-b197b9c818f5-kube-api-access-4rlcs\") pod \"crc-debug-dc2d4\" (UID: \"7cc54265-5646-4d34-bf5f-b197b9c818f5\") " pod="openstack/crc-debug-dc2d4" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.446446 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7cc54265-5646-4d34-bf5f-b197b9c818f5-host\") pod \"crc-debug-dc2d4\" (UID: \"7cc54265-5646-4d34-bf5f-b197b9c818f5\") " pod="openstack/crc-debug-dc2d4" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.446545 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7cc54265-5646-4d34-bf5f-b197b9c818f5-host\") pod \"crc-debug-dc2d4\" (UID: \"7cc54265-5646-4d34-bf5f-b197b9c818f5\") " pod="openstack/crc-debug-dc2d4" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.446562 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rlcs\" (UniqueName: \"kubernetes.io/projected/7cc54265-5646-4d34-bf5f-b197b9c818f5-kube-api-access-4rlcs\") pod \"crc-debug-dc2d4\" (UID: \"7cc54265-5646-4d34-bf5f-b197b9c818f5\") " pod="openstack/crc-debug-dc2d4" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.472472 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rlcs\" (UniqueName: \"kubernetes.io/projected/7cc54265-5646-4d34-bf5f-b197b9c818f5-kube-api-access-4rlcs\") pod \"crc-debug-dc2d4\" (UID: \"7cc54265-5646-4d34-bf5f-b197b9c818f5\") " pod="openstack/crc-debug-dc2d4" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.553587 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-dc2d4" Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.963762 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-dc2d4" event={"ID":"7cc54265-5646-4d34-bf5f-b197b9c818f5","Type":"ContainerStarted","Data":"bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc"} Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.964189 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-dc2d4" event={"ID":"7cc54265-5646-4d34-bf5f-b197b9c818f5","Type":"ContainerStarted","Data":"70015578dbf8a0b9a2097be200e2d70e3cf19c4be9daaef5267edda67c589714"} Jun 06 10:04:02 crc kubenswrapper[4911]: I0606 10:04:02.982119 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-dc2d4" podStartSLOduration=0.982080491 podStartE2EDuration="982.080491ms" podCreationTimestamp="2025-06-06 10:04:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:04:02.976844345 +0000 UTC m=+3054.252269898" watchObservedRunningTime="2025-06-06 10:04:02.982080491 +0000 UTC m=+3054.257506034" Jun 06 10:04:04 crc kubenswrapper[4911]: I0606 10:04:04.949173 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:04:04 crc kubenswrapper[4911]: E0606 10:04:04.949988 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:04:13 crc kubenswrapper[4911]: I0606 10:04:13.191664 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-dc2d4"] Jun 06 10:04:13 crc kubenswrapper[4911]: I0606 10:04:13.192945 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-dc2d4" podUID="7cc54265-5646-4d34-bf5f-b197b9c818f5" containerName="container-00" containerID="cri-o://bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc" gracePeriod=2 Jun 06 10:04:13 crc kubenswrapper[4911]: I0606 10:04:13.205735 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-dc2d4"] Jun 06 10:04:13 crc kubenswrapper[4911]: I0606 10:04:13.313530 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-dc2d4" Jun 06 10:04:13 crc kubenswrapper[4911]: I0606 10:04:13.388693 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rlcs\" (UniqueName: \"kubernetes.io/projected/7cc54265-5646-4d34-bf5f-b197b9c818f5-kube-api-access-4rlcs\") pod \"7cc54265-5646-4d34-bf5f-b197b9c818f5\" (UID: \"7cc54265-5646-4d34-bf5f-b197b9c818f5\") " Jun 06 10:04:13 crc kubenswrapper[4911]: I0606 10:04:13.388902 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7cc54265-5646-4d34-bf5f-b197b9c818f5-host\") pod \"7cc54265-5646-4d34-bf5f-b197b9c818f5\" (UID: \"7cc54265-5646-4d34-bf5f-b197b9c818f5\") " Jun 06 10:04:13 crc kubenswrapper[4911]: I0606 10:04:13.389000 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7cc54265-5646-4d34-bf5f-b197b9c818f5-host" (OuterVolumeSpecName: "host") pod "7cc54265-5646-4d34-bf5f-b197b9c818f5" (UID: "7cc54265-5646-4d34-bf5f-b197b9c818f5"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:04:13 crc kubenswrapper[4911]: I0606 10:04:13.389407 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7cc54265-5646-4d34-bf5f-b197b9c818f5-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:04:13 crc kubenswrapper[4911]: I0606 10:04:13.398772 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cc54265-5646-4d34-bf5f-b197b9c818f5-kube-api-access-4rlcs" (OuterVolumeSpecName: "kube-api-access-4rlcs") pod "7cc54265-5646-4d34-bf5f-b197b9c818f5" (UID: "7cc54265-5646-4d34-bf5f-b197b9c818f5"). InnerVolumeSpecName "kube-api-access-4rlcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:04:13 crc kubenswrapper[4911]: I0606 10:04:13.490970 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rlcs\" (UniqueName: \"kubernetes.io/projected/7cc54265-5646-4d34-bf5f-b197b9c818f5-kube-api-access-4rlcs\") on node \"crc\" DevicePath \"\"" Jun 06 10:04:13 crc kubenswrapper[4911]: I0606 10:04:13.958254 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cc54265-5646-4d34-bf5f-b197b9c818f5" path="/var/lib/kubelet/pods/7cc54265-5646-4d34-bf5f-b197b9c818f5/volumes" Jun 06 10:04:14 crc kubenswrapper[4911]: I0606 10:04:14.068861 4911 generic.go:334] "Generic (PLEG): container finished" podID="7cc54265-5646-4d34-bf5f-b197b9c818f5" containerID="bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc" exitCode=0 Jun 06 10:04:14 crc kubenswrapper[4911]: I0606 10:04:14.068913 4911 scope.go:117] "RemoveContainer" containerID="bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc" Jun 06 10:04:14 crc kubenswrapper[4911]: I0606 10:04:14.068908 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-dc2d4" Jun 06 10:04:14 crc kubenswrapper[4911]: I0606 10:04:14.089511 4911 scope.go:117] "RemoveContainer" containerID="bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc" Jun 06 10:04:14 crc kubenswrapper[4911]: E0606 10:04:14.093534 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc\": container with ID starting with bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc not found: ID does not exist" containerID="bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc" Jun 06 10:04:14 crc kubenswrapper[4911]: I0606 10:04:14.093576 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc"} err="failed to get container status \"bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc\": rpc error: code = NotFound desc = could not find container \"bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc\": container with ID starting with bb4630958131a1ec12088a3fc24d2ae95eff851cfa7d401c39983f26048926cc not found: ID does not exist" Jun 06 10:04:19 crc kubenswrapper[4911]: I0606 10:04:19.956071 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:04:19 crc kubenswrapper[4911]: E0606 10:04:19.956873 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:04:31 crc kubenswrapper[4911]: I0606 10:04:31.948483 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:04:31 crc kubenswrapper[4911]: E0606 10:04:31.949280 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:04:42 crc kubenswrapper[4911]: I0606 10:04:42.948289 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:04:42 crc kubenswrapper[4911]: E0606 10:04:42.949251 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:04:53 crc kubenswrapper[4911]: I0606 10:04:53.948554 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:04:53 crc kubenswrapper[4911]: E0606 10:04:53.949336 
4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.437589 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bqkl4"] Jun 06 10:04:59 crc kubenswrapper[4911]: E0606 10:04:59.439480 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cc54265-5646-4d34-bf5f-b197b9c818f5" containerName="container-00" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.439505 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cc54265-5646-4d34-bf5f-b197b9c818f5" containerName="container-00" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.439875 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cc54265-5646-4d34-bf5f-b197b9c818f5" containerName="container-00" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.443433 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.465335 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bqkl4"] Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.532530 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-utilities\") pod \"community-operators-bqkl4\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.532655 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-catalog-content\") pod \"community-operators-bqkl4\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.532747 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc97s\" (UniqueName: \"kubernetes.io/projected/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-kube-api-access-pc97s\") pod \"community-operators-bqkl4\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.634477 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-catalog-content\") pod \"community-operators-bqkl4\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.635066 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-catalog-content\") pod \"community-operators-bqkl4\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " 
pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.635074 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc97s\" (UniqueName: \"kubernetes.io/projected/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-kube-api-access-pc97s\") pod \"community-operators-bqkl4\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.635392 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-utilities\") pod \"community-operators-bqkl4\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.636160 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-utilities\") pod \"community-operators-bqkl4\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.658889 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc97s\" (UniqueName: \"kubernetes.io/projected/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-kube-api-access-pc97s\") pod \"community-operators-bqkl4\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:04:59 crc kubenswrapper[4911]: I0606 10:04:59.777663 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:05:00 crc kubenswrapper[4911]: I0606 10:05:00.548817 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bqkl4"] Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.495234 4911 generic.go:334] "Generic (PLEG): container finished" podID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" containerID="907919e3167486366ad54eebf61120d37e106dd702941e9ad09aeff419acb153" exitCode=0 Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.495354 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bqkl4" event={"ID":"19d6c344-2ed1-4cf8-89f9-f2c979398e4b","Type":"ContainerDied","Data":"907919e3167486366ad54eebf61120d37e106dd702941e9ad09aeff419acb153"} Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.495552 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bqkl4" event={"ID":"19d6c344-2ed1-4cf8-89f9-f2c979398e4b","Type":"ContainerStarted","Data":"d9e8307468a4dba14426f837e6888c9a61e3bb969d801b81b04bd8ae8a1c4baf"} Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.498733 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.608830 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-zq2r5"] Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.610316 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-zq2r5" Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.675741 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7g9k\" (UniqueName: \"kubernetes.io/projected/408f5443-8b41-4e4a-9988-a9b95fefc017-kube-api-access-q7g9k\") pod \"crc-debug-zq2r5\" (UID: \"408f5443-8b41-4e4a-9988-a9b95fefc017\") " pod="openstack/crc-debug-zq2r5" Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.676285 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/408f5443-8b41-4e4a-9988-a9b95fefc017-host\") pod \"crc-debug-zq2r5\" (UID: \"408f5443-8b41-4e4a-9988-a9b95fefc017\") " pod="openstack/crc-debug-zq2r5" Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.779449 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/408f5443-8b41-4e4a-9988-a9b95fefc017-host\") pod \"crc-debug-zq2r5\" (UID: \"408f5443-8b41-4e4a-9988-a9b95fefc017\") " pod="openstack/crc-debug-zq2r5" Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.779640 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/408f5443-8b41-4e4a-9988-a9b95fefc017-host\") pod \"crc-debug-zq2r5\" (UID: \"408f5443-8b41-4e4a-9988-a9b95fefc017\") " pod="openstack/crc-debug-zq2r5" Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.779681 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7g9k\" (UniqueName: \"kubernetes.io/projected/408f5443-8b41-4e4a-9988-a9b95fefc017-kube-api-access-q7g9k\") pod \"crc-debug-zq2r5\" (UID: \"408f5443-8b41-4e4a-9988-a9b95fefc017\") " pod="openstack/crc-debug-zq2r5" Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.801761 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7g9k\" (UniqueName: \"kubernetes.io/projected/408f5443-8b41-4e4a-9988-a9b95fefc017-kube-api-access-q7g9k\") pod \"crc-debug-zq2r5\" (UID: \"408f5443-8b41-4e4a-9988-a9b95fefc017\") " pod="openstack/crc-debug-zq2r5" Jun 06 10:05:01 crc kubenswrapper[4911]: I0606 10:05:01.932859 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-zq2r5" Jun 06 10:05:02 crc kubenswrapper[4911]: I0606 10:05:02.505081 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-zq2r5" event={"ID":"408f5443-8b41-4e4a-9988-a9b95fefc017","Type":"ContainerStarted","Data":"7f10344f140542a15c91d7478c866f9007fc11a8091f92dd205d77f970150d02"} Jun 06 10:05:02 crc kubenswrapper[4911]: I0606 10:05:02.505410 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-zq2r5" event={"ID":"408f5443-8b41-4e4a-9988-a9b95fefc017","Type":"ContainerStarted","Data":"8fd015510b009fed29c608a180b5f84d9204d5deb41f90850cedb931762843db"} Jun 06 10:05:02 crc kubenswrapper[4911]: I0606 10:05:02.520667 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-zq2r5" podStartSLOduration=1.5206503850000002 podStartE2EDuration="1.520650385s" podCreationTimestamp="2025-06-06 10:05:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:05:02.518323824 +0000 UTC m=+3113.793749387" watchObservedRunningTime="2025-06-06 10:05:02.520650385 +0000 UTC m=+3113.796075928" Jun 06 10:05:03 crc kubenswrapper[4911]: I0606 10:05:03.532137 4911 generic.go:334] "Generic (PLEG): container finished" podID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" containerID="165c8a2e71abd66f4960342df6ec28b46047cb137670e9894828a314c0b066f5" exitCode=0 Jun 06 10:05:03 crc kubenswrapper[4911]: I0606 10:05:03.532390 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bqkl4" event={"ID":"19d6c344-2ed1-4cf8-89f9-f2c979398e4b","Type":"ContainerDied","Data":"165c8a2e71abd66f4960342df6ec28b46047cb137670e9894828a314c0b066f5"} Jun 06 10:05:04 crc kubenswrapper[4911]: I0606 10:05:04.542953 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bqkl4" event={"ID":"19d6c344-2ed1-4cf8-89f9-f2c979398e4b","Type":"ContainerStarted","Data":"d28e9f65d767f8c054acebf405d214205cd30136adfd375e2a34f14a94d01855"} Jun 06 10:05:04 crc kubenswrapper[4911]: I0606 10:05:04.570232 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bqkl4" podStartSLOduration=3.127604295 podStartE2EDuration="5.570214063s" podCreationTimestamp="2025-06-06 10:04:59 +0000 UTC" firstStartedPulling="2025-06-06 10:05:01.498475298 +0000 UTC m=+3112.773900841" lastFinishedPulling="2025-06-06 10:05:03.941085066 +0000 UTC m=+3115.216510609" observedRunningTime="2025-06-06 10:05:04.563321894 +0000 UTC m=+3115.838747437" watchObservedRunningTime="2025-06-06 10:05:04.570214063 +0000 UTC m=+3115.845639606" Jun 06 10:05:04 crc kubenswrapper[4911]: I0606 10:05:04.948974 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:05:04 crc kubenswrapper[4911]: E0606 10:05:04.949733 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:05:09 crc kubenswrapper[4911]: I0606 10:05:09.778578 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:05:09 crc kubenswrapper[4911]: I0606 10:05:09.779344 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:05:09 crc kubenswrapper[4911]: I0606 10:05:09.824890 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:05:10 crc kubenswrapper[4911]: I0606 10:05:10.642796 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:05:10 crc kubenswrapper[4911]: I0606 10:05:10.690403 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bqkl4"] Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.543774 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-zq2r5"] Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.544438 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-zq2r5" podUID="408f5443-8b41-4e4a-9988-a9b95fefc017" containerName="container-00" containerID="cri-o://7f10344f140542a15c91d7478c866f9007fc11a8091f92dd205d77f970150d02" gracePeriod=2 Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.558050 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-zq2r5"] Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.610724 4911 generic.go:334] "Generic (PLEG): container finished" podID="408f5443-8b41-4e4a-9988-a9b95fefc017" containerID="7f10344f140542a15c91d7478c866f9007fc11a8091f92dd205d77f970150d02" exitCode=0 Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.610859 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8fd015510b009fed29c608a180b5f84d9204d5deb41f90850cedb931762843db" Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.611047 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bqkl4" podUID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" containerName="registry-server" containerID="cri-o://d28e9f65d767f8c054acebf405d214205cd30136adfd375e2a34f14a94d01855" gracePeriod=2 Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.788612 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-zq2r5" Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.832049 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/408f5443-8b41-4e4a-9988-a9b95fefc017-host\") pod \"408f5443-8b41-4e4a-9988-a9b95fefc017\" (UID: \"408f5443-8b41-4e4a-9988-a9b95fefc017\") " Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.832145 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7g9k\" (UniqueName: \"kubernetes.io/projected/408f5443-8b41-4e4a-9988-a9b95fefc017-kube-api-access-q7g9k\") pod \"408f5443-8b41-4e4a-9988-a9b95fefc017\" (UID: \"408f5443-8b41-4e4a-9988-a9b95fefc017\") " Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.832190 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/408f5443-8b41-4e4a-9988-a9b95fefc017-host" (OuterVolumeSpecName: "host") pod "408f5443-8b41-4e4a-9988-a9b95fefc017" (UID: "408f5443-8b41-4e4a-9988-a9b95fefc017"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.832648 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/408f5443-8b41-4e4a-9988-a9b95fefc017-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.841080 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/408f5443-8b41-4e4a-9988-a9b95fefc017-kube-api-access-q7g9k" (OuterVolumeSpecName: "kube-api-access-q7g9k") pod "408f5443-8b41-4e4a-9988-a9b95fefc017" (UID: "408f5443-8b41-4e4a-9988-a9b95fefc017"). InnerVolumeSpecName "kube-api-access-q7g9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:05:12 crc kubenswrapper[4911]: I0606 10:05:12.936211 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7g9k\" (UniqueName: \"kubernetes.io/projected/408f5443-8b41-4e4a-9988-a9b95fefc017-kube-api-access-q7g9k\") on node \"crc\" DevicePath \"\"" Jun 06 10:05:13 crc kubenswrapper[4911]: I0606 10:05:13.626163 4911 generic.go:334] "Generic (PLEG): container finished" podID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" containerID="d28e9f65d767f8c054acebf405d214205cd30136adfd375e2a34f14a94d01855" exitCode=0 Jun 06 10:05:13 crc kubenswrapper[4911]: I0606 10:05:13.626213 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bqkl4" event={"ID":"19d6c344-2ed1-4cf8-89f9-f2c979398e4b","Type":"ContainerDied","Data":"d28e9f65d767f8c054acebf405d214205cd30136adfd375e2a34f14a94d01855"} Jun 06 10:05:13 crc kubenswrapper[4911]: I0606 10:05:13.626544 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-zq2r5" Jun 06 10:05:13 crc kubenswrapper[4911]: I0606 10:05:13.958554 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="408f5443-8b41-4e4a-9988-a9b95fefc017" path="/var/lib/kubelet/pods/408f5443-8b41-4e4a-9988-a9b95fefc017/volumes" Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.024236 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.053812 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-utilities\") pod \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.053959 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pc97s\" (UniqueName: \"kubernetes.io/projected/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-kube-api-access-pc97s\") pod \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.054058 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-catalog-content\") pod \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\" (UID: \"19d6c344-2ed1-4cf8-89f9-f2c979398e4b\") " Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.057525 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-utilities" (OuterVolumeSpecName: "utilities") pod "19d6c344-2ed1-4cf8-89f9-f2c979398e4b" (UID: "19d6c344-2ed1-4cf8-89f9-f2c979398e4b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.099418 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "19d6c344-2ed1-4cf8-89f9-f2c979398e4b" (UID: "19d6c344-2ed1-4cf8-89f9-f2c979398e4b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.116614 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-kube-api-access-pc97s" (OuterVolumeSpecName: "kube-api-access-pc97s") pod "19d6c344-2ed1-4cf8-89f9-f2c979398e4b" (UID: "19d6c344-2ed1-4cf8-89f9-f2c979398e4b"). InnerVolumeSpecName "kube-api-access-pc97s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.156762 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.156812 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pc97s\" (UniqueName: \"kubernetes.io/projected/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-kube-api-access-pc97s\") on node \"crc\" DevicePath \"\"" Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.156825 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19d6c344-2ed1-4cf8-89f9-f2c979398e4b-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.641309 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bqkl4" event={"ID":"19d6c344-2ed1-4cf8-89f9-f2c979398e4b","Type":"ContainerDied","Data":"d9e8307468a4dba14426f837e6888c9a61e3bb969d801b81b04bd8ae8a1c4baf"} Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.641418 4911 scope.go:117] "RemoveContainer" containerID="d28e9f65d767f8c054acebf405d214205cd30136adfd375e2a34f14a94d01855" Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.641465 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bqkl4" Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.702225 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bqkl4"] Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.712245 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bqkl4"] Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.713263 4911 scope.go:117] "RemoveContainer" containerID="165c8a2e71abd66f4960342df6ec28b46047cb137670e9894828a314c0b066f5" Jun 06 10:05:14 crc kubenswrapper[4911]: I0606 10:05:14.742142 4911 scope.go:117] "RemoveContainer" containerID="907919e3167486366ad54eebf61120d37e106dd702941e9ad09aeff419acb153" Jun 06 10:05:15 crc kubenswrapper[4911]: I0606 10:05:15.959735 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" path="/var/lib/kubelet/pods/19d6c344-2ed1-4cf8-89f9-f2c979398e4b/volumes" Jun 06 10:05:18 crc kubenswrapper[4911]: I0606 10:05:18.948404 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:05:18 crc kubenswrapper[4911]: E0606 10:05:18.949203 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:05:30 crc kubenswrapper[4911]: I0606 10:05:30.948579 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:05:30 crc kubenswrapper[4911]: E0606 10:05:30.949352 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:05:42 crc kubenswrapper[4911]: I0606 10:05:42.948421 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:05:42 crc kubenswrapper[4911]: E0606 10:05:42.949616 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:05:53 crc kubenswrapper[4911]: I0606 10:05:53.948621 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:05:53 crc kubenswrapper[4911]: E0606 10:05:53.949470 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:06:01 crc kubenswrapper[4911]: I0606 10:06:01.967587 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-mnxrh"] Jun 06 10:06:01 crc kubenswrapper[4911]: E0606 10:06:01.968678 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="408f5443-8b41-4e4a-9988-a9b95fefc017" containerName="container-00" Jun 06 10:06:01 crc kubenswrapper[4911]: I0606 10:06:01.968695 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="408f5443-8b41-4e4a-9988-a9b95fefc017" containerName="container-00" Jun 06 10:06:01 crc kubenswrapper[4911]: E0606 10:06:01.968722 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" containerName="extract-utilities" Jun 06 10:06:01 crc kubenswrapper[4911]: I0606 10:06:01.968730 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" containerName="extract-utilities" Jun 06 10:06:01 crc kubenswrapper[4911]: E0606 10:06:01.968762 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" containerName="extract-content" Jun 06 10:06:01 crc kubenswrapper[4911]: I0606 10:06:01.968771 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" containerName="extract-content" Jun 06 10:06:01 crc kubenswrapper[4911]: E0606 10:06:01.968786 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" containerName="registry-server" Jun 06 10:06:01 crc kubenswrapper[4911]: I0606 10:06:01.968792 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" containerName="registry-server" Jun 06 10:06:01 crc kubenswrapper[4911]: I0606 10:06:01.969000 4911 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="19d6c344-2ed1-4cf8-89f9-f2c979398e4b" containerName="registry-server" Jun 06 10:06:01 crc kubenswrapper[4911]: I0606 10:06:01.969033 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="408f5443-8b41-4e4a-9988-a9b95fefc017" containerName="container-00" Jun 06 10:06:01 crc kubenswrapper[4911]: I0606 10:06:01.969724 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-mnxrh" Jun 06 10:06:02 crc kubenswrapper[4911]: I0606 10:06:02.143080 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4knt9\" (UniqueName: \"kubernetes.io/projected/50e31a07-b2d4-40dc-b716-21e946e99a9c-kube-api-access-4knt9\") pod \"crc-debug-mnxrh\" (UID: \"50e31a07-b2d4-40dc-b716-21e946e99a9c\") " pod="openstack/crc-debug-mnxrh" Jun 06 10:06:02 crc kubenswrapper[4911]: I0606 10:06:02.143652 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50e31a07-b2d4-40dc-b716-21e946e99a9c-host\") pod \"crc-debug-mnxrh\" (UID: \"50e31a07-b2d4-40dc-b716-21e946e99a9c\") " pod="openstack/crc-debug-mnxrh" Jun 06 10:06:02 crc kubenswrapper[4911]: I0606 10:06:02.245383 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50e31a07-b2d4-40dc-b716-21e946e99a9c-host\") pod \"crc-debug-mnxrh\" (UID: \"50e31a07-b2d4-40dc-b716-21e946e99a9c\") " pod="openstack/crc-debug-mnxrh" Jun 06 10:06:02 crc kubenswrapper[4911]: I0606 10:06:02.245427 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4knt9\" (UniqueName: \"kubernetes.io/projected/50e31a07-b2d4-40dc-b716-21e946e99a9c-kube-api-access-4knt9\") pod \"crc-debug-mnxrh\" (UID: \"50e31a07-b2d4-40dc-b716-21e946e99a9c\") " pod="openstack/crc-debug-mnxrh" Jun 06 10:06:02 crc kubenswrapper[4911]: I0606 10:06:02.245514 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50e31a07-b2d4-40dc-b716-21e946e99a9c-host\") pod \"crc-debug-mnxrh\" (UID: \"50e31a07-b2d4-40dc-b716-21e946e99a9c\") " pod="openstack/crc-debug-mnxrh" Jun 06 10:06:02 crc kubenswrapper[4911]: I0606 10:06:02.286736 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4knt9\" (UniqueName: \"kubernetes.io/projected/50e31a07-b2d4-40dc-b716-21e946e99a9c-kube-api-access-4knt9\") pod \"crc-debug-mnxrh\" (UID: \"50e31a07-b2d4-40dc-b716-21e946e99a9c\") " pod="openstack/crc-debug-mnxrh" Jun 06 10:06:02 crc kubenswrapper[4911]: I0606 10:06:02.289956 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mnxrh" Jun 06 10:06:03 crc kubenswrapper[4911]: I0606 10:06:03.091352 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mnxrh" event={"ID":"50e31a07-b2d4-40dc-b716-21e946e99a9c","Type":"ContainerStarted","Data":"68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947"} Jun 06 10:06:03 crc kubenswrapper[4911]: I0606 10:06:03.091792 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mnxrh" event={"ID":"50e31a07-b2d4-40dc-b716-21e946e99a9c","Type":"ContainerStarted","Data":"7d00b4724810f42a27b34d7ec8969b194083a8cc214d6361cae9cd876dd5a356"} Jun 06 10:06:03 crc kubenswrapper[4911]: I0606 10:06:03.114872 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-mnxrh" podStartSLOduration=2.114847073 podStartE2EDuration="2.114847073s" podCreationTimestamp="2025-06-06 10:06:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:06:03.107153364 +0000 UTC m=+3174.382578927" watchObservedRunningTime="2025-06-06 10:06:03.114847073 +0000 UTC m=+3174.390272616" Jun 06 10:06:04 crc kubenswrapper[4911]: I0606 10:06:04.947601 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:06:04 crc kubenswrapper[4911]: E0606 10:06:04.948286 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:06:12 crc kubenswrapper[4911]: I0606 10:06:12.908809 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-mnxrh"] Jun 06 10:06:12 crc kubenswrapper[4911]: I0606 10:06:12.911660 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-mnxrh" podUID="50e31a07-b2d4-40dc-b716-21e946e99a9c" containerName="container-00" containerID="cri-o://68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947" gracePeriod=2 Jun 06 10:06:12 crc kubenswrapper[4911]: I0606 10:06:12.920934 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-mnxrh"] Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.033900 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mnxrh" Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.082829 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50e31a07-b2d4-40dc-b716-21e946e99a9c-host\") pod \"50e31a07-b2d4-40dc-b716-21e946e99a9c\" (UID: \"50e31a07-b2d4-40dc-b716-21e946e99a9c\") " Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.082890 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4knt9\" (UniqueName: \"kubernetes.io/projected/50e31a07-b2d4-40dc-b716-21e946e99a9c-kube-api-access-4knt9\") pod \"50e31a07-b2d4-40dc-b716-21e946e99a9c\" (UID: \"50e31a07-b2d4-40dc-b716-21e946e99a9c\") " Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.084175 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50e31a07-b2d4-40dc-b716-21e946e99a9c-host" (OuterVolumeSpecName: "host") pod "50e31a07-b2d4-40dc-b716-21e946e99a9c" (UID: "50e31a07-b2d4-40dc-b716-21e946e99a9c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.103253 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50e31a07-b2d4-40dc-b716-21e946e99a9c-kube-api-access-4knt9" (OuterVolumeSpecName: "kube-api-access-4knt9") pod "50e31a07-b2d4-40dc-b716-21e946e99a9c" (UID: "50e31a07-b2d4-40dc-b716-21e946e99a9c"). InnerVolumeSpecName "kube-api-access-4knt9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.179155 4911 generic.go:334] "Generic (PLEG): container finished" podID="50e31a07-b2d4-40dc-b716-21e946e99a9c" containerID="68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947" exitCode=0 Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.179199 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mnxrh" Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.179232 4911 scope.go:117] "RemoveContainer" containerID="68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947" Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.184765 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50e31a07-b2d4-40dc-b716-21e946e99a9c-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.184813 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4knt9\" (UniqueName: \"kubernetes.io/projected/50e31a07-b2d4-40dc-b716-21e946e99a9c-kube-api-access-4knt9\") on node \"crc\" DevicePath \"\"" Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.200028 4911 scope.go:117] "RemoveContainer" containerID="68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947" Jun 06 10:06:13 crc kubenswrapper[4911]: E0606 10:06:13.200533 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947\": container with ID starting with 68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947 not found: ID does not exist" containerID="68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947" Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.200577 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947"} err="failed to get container status \"68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947\": rpc error: code = NotFound desc = could not find container \"68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947\": container with ID starting with 68fa7a2c63605bd5bd1b46fb728f243b9fd858b845609a1f1e6dbd68cebef947 not found: ID does not exist" Jun 06 10:06:13 crc kubenswrapper[4911]: I0606 10:06:13.959674 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50e31a07-b2d4-40dc-b716-21e946e99a9c" path="/var/lib/kubelet/pods/50e31a07-b2d4-40dc-b716-21e946e99a9c/volumes" Jun 06 10:06:17 crc kubenswrapper[4911]: I0606 10:06:17.949347 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:06:17 crc kubenswrapper[4911]: E0606 10:06:17.950844 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:06:21 crc kubenswrapper[4911]: I0606 10:06:21.386928 4911 scope.go:117] "RemoveContainer" containerID="5e38ccb390c0b66432d0f72072a6500dee02771fd6f64c63b79597c4218a0231" Jun 06 10:06:30 crc kubenswrapper[4911]: I0606 10:06:30.949664 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:06:31 crc kubenswrapper[4911]: I0606 10:06:31.335616 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" 
event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"5eaedf378a467801de1d31fb5b2d7f81648dcedca173908f5c9a7a95a8b2ce65"} Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.351501 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-zt646"] Jun 06 10:07:02 crc kubenswrapper[4911]: E0606 10:07:02.352859 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50e31a07-b2d4-40dc-b716-21e946e99a9c" containerName="container-00" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.352875 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="50e31a07-b2d4-40dc-b716-21e946e99a9c" containerName="container-00" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.353061 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="50e31a07-b2d4-40dc-b716-21e946e99a9c" containerName="container-00" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.354114 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-zt646" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.469514 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5f958"] Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.471797 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.483614 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-host\") pod \"crc-debug-zt646\" (UID: \"e4b0bcd3-501c-4105-bbdd-b5d024cfd788\") " pod="openstack/crc-debug-zt646" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.483694 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92k2l\" (UniqueName: \"kubernetes.io/projected/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-kube-api-access-92k2l\") pod \"crc-debug-zt646\" (UID: \"e4b0bcd3-501c-4105-bbdd-b5d024cfd788\") " pod="openstack/crc-debug-zt646" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.502032 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5f958"] Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.585646 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvwk6\" (UniqueName: \"kubernetes.io/projected/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-kube-api-access-zvwk6\") pod \"redhat-operators-5f958\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.585746 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-host\") pod \"crc-debug-zt646\" (UID: \"e4b0bcd3-501c-4105-bbdd-b5d024cfd788\") " pod="openstack/crc-debug-zt646" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.585780 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92k2l\" (UniqueName: \"kubernetes.io/projected/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-kube-api-access-92k2l\") pod \"crc-debug-zt646\" (UID: \"e4b0bcd3-501c-4105-bbdd-b5d024cfd788\") " pod="openstack/crc-debug-zt646" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.585821 
4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-host\") pod \"crc-debug-zt646\" (UID: \"e4b0bcd3-501c-4105-bbdd-b5d024cfd788\") " pod="openstack/crc-debug-zt646" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.585961 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-catalog-content\") pod \"redhat-operators-5f958\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.585994 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-utilities\") pod \"redhat-operators-5f958\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.606081 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92k2l\" (UniqueName: \"kubernetes.io/projected/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-kube-api-access-92k2l\") pod \"crc-debug-zt646\" (UID: \"e4b0bcd3-501c-4105-bbdd-b5d024cfd788\") " pod="openstack/crc-debug-zt646" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.677543 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-zt646" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.687959 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvwk6\" (UniqueName: \"kubernetes.io/projected/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-kube-api-access-zvwk6\") pod \"redhat-operators-5f958\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.688150 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-catalog-content\") pod \"redhat-operators-5f958\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.688192 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-utilities\") pod \"redhat-operators-5f958\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.688678 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-catalog-content\") pod \"redhat-operators-5f958\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.688743 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-utilities\") pod \"redhat-operators-5f958\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " pod="openshift-marketplace/redhat-operators-5f958" Jun 06 
10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.706083 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvwk6\" (UniqueName: \"kubernetes.io/projected/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-kube-api-access-zvwk6\") pod \"redhat-operators-5f958\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:02 crc kubenswrapper[4911]: I0606 10:07:02.805796 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:03 crc kubenswrapper[4911]: I0606 10:07:03.512006 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5f958"] Jun 06 10:07:03 crc kubenswrapper[4911]: I0606 10:07:03.627068 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-zt646" event={"ID":"e4b0bcd3-501c-4105-bbdd-b5d024cfd788","Type":"ContainerStarted","Data":"5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7"} Jun 06 10:07:03 crc kubenswrapper[4911]: I0606 10:07:03.627167 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-zt646" event={"ID":"e4b0bcd3-501c-4105-bbdd-b5d024cfd788","Type":"ContainerStarted","Data":"8e6b048385ccec2f6fbbe1934acd9c85b2183e80349293e90723c2152c332ead"} Jun 06 10:07:03 crc kubenswrapper[4911]: I0606 10:07:03.628962 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5f958" event={"ID":"0a3096cf-3d45-4dd6-a45c-c1508d0bf869","Type":"ContainerStarted","Data":"f97866cd58c2a848686d29ed403827689111ab5988b7164e0a24516787c270da"} Jun 06 10:07:03 crc kubenswrapper[4911]: I0606 10:07:03.642036 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-zt646" podStartSLOduration=1.642015403 podStartE2EDuration="1.642015403s" podCreationTimestamp="2025-06-06 10:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:07:03.64036233 +0000 UTC m=+3234.915787883" watchObservedRunningTime="2025-06-06 10:07:03.642015403 +0000 UTC m=+3234.917440946" Jun 06 10:07:04 crc kubenswrapper[4911]: I0606 10:07:04.643376 4911 generic.go:334] "Generic (PLEG): container finished" podID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" containerID="590ae267b856a4516e3ba0b30c5964ed575661b3daaab4c97d860b1a510d0fa2" exitCode=0 Jun 06 10:07:04 crc kubenswrapper[4911]: I0606 10:07:04.643643 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5f958" event={"ID":"0a3096cf-3d45-4dd6-a45c-c1508d0bf869","Type":"ContainerDied","Data":"590ae267b856a4516e3ba0b30c5964ed575661b3daaab4c97d860b1a510d0fa2"} Jun 06 10:07:06 crc kubenswrapper[4911]: I0606 10:07:06.663010 4911 generic.go:334] "Generic (PLEG): container finished" podID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" containerID="6e93c390adc4078fadf1e7bfafcef9fc527e9e7ca30daa5bd88670c248b7e47d" exitCode=0 Jun 06 10:07:06 crc kubenswrapper[4911]: I0606 10:07:06.663073 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5f958" event={"ID":"0a3096cf-3d45-4dd6-a45c-c1508d0bf869","Type":"ContainerDied","Data":"6e93c390adc4078fadf1e7bfafcef9fc527e9e7ca30daa5bd88670c248b7e47d"} Jun 06 10:07:08 crc kubenswrapper[4911]: I0606 10:07:08.693584 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5f958" 
event={"ID":"0a3096cf-3d45-4dd6-a45c-c1508d0bf869","Type":"ContainerStarted","Data":"54f9d3c6d3c73f3e49f57a1d2f58aa19d36615e8032980a1634c2669b69bd885"} Jun 06 10:07:08 crc kubenswrapper[4911]: I0606 10:07:08.714624 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5f958" podStartSLOduration=4.053047931 podStartE2EDuration="6.714605832s" podCreationTimestamp="2025-06-06 10:07:02 +0000 UTC" firstStartedPulling="2025-06-06 10:07:04.64535656 +0000 UTC m=+3235.920782103" lastFinishedPulling="2025-06-06 10:07:07.306914461 +0000 UTC m=+3238.582340004" observedRunningTime="2025-06-06 10:07:08.710730081 +0000 UTC m=+3239.986155624" watchObservedRunningTime="2025-06-06 10:07:08.714605832 +0000 UTC m=+3239.990031375" Jun 06 10:07:12 crc kubenswrapper[4911]: I0606 10:07:12.806644 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:12 crc kubenswrapper[4911]: I0606 10:07:12.807148 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:12 crc kubenswrapper[4911]: I0606 10:07:12.857739 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.388228 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-zt646"] Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.388632 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-zt646" podUID="e4b0bcd3-501c-4105-bbdd-b5d024cfd788" containerName="container-00" containerID="cri-o://5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7" gracePeriod=2 Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.395766 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-zt646"] Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.592148 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-zt646" Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.737540 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-host\") pod \"e4b0bcd3-501c-4105-bbdd-b5d024cfd788\" (UID: \"e4b0bcd3-501c-4105-bbdd-b5d024cfd788\") " Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.737695 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-host" (OuterVolumeSpecName: "host") pod "e4b0bcd3-501c-4105-bbdd-b5d024cfd788" (UID: "e4b0bcd3-501c-4105-bbdd-b5d024cfd788"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.737812 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92k2l\" (UniqueName: \"kubernetes.io/projected/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-kube-api-access-92k2l\") pod \"e4b0bcd3-501c-4105-bbdd-b5d024cfd788\" (UID: \"e4b0bcd3-501c-4105-bbdd-b5d024cfd788\") " Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.738800 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.740874 4911 generic.go:334] "Generic (PLEG): container finished" podID="e4b0bcd3-501c-4105-bbdd-b5d024cfd788" containerID="5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7" exitCode=0 Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.740956 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-zt646" Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.741009 4911 scope.go:117] "RemoveContainer" containerID="5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7" Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.747891 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-kube-api-access-92k2l" (OuterVolumeSpecName: "kube-api-access-92k2l") pod "e4b0bcd3-501c-4105-bbdd-b5d024cfd788" (UID: "e4b0bcd3-501c-4105-bbdd-b5d024cfd788"). InnerVolumeSpecName "kube-api-access-92k2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.800271 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.808242 4911 scope.go:117] "RemoveContainer" containerID="5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7" Jun 06 10:07:13 crc kubenswrapper[4911]: E0606 10:07:13.808853 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7\": container with ID starting with 5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7 not found: ID does not exist" containerID="5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7" Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.808917 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7"} err="failed to get container status \"5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7\": rpc error: code = NotFound desc = could not find container \"5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7\": container with ID starting with 5f0bba7973bfca8eaeb06fd0a736ea9e6e842411bab9a0e932aa551cb8d58fe7 not found: ID does not exist" Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.845775 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92k2l\" (UniqueName: \"kubernetes.io/projected/e4b0bcd3-501c-4105-bbdd-b5d024cfd788-kube-api-access-92k2l\") on node \"crc\" DevicePath \"\"" Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.861183 4911 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5f958"] Jun 06 10:07:13 crc kubenswrapper[4911]: I0606 10:07:13.960079 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4b0bcd3-501c-4105-bbdd-b5d024cfd788" path="/var/lib/kubelet/pods/e4b0bcd3-501c-4105-bbdd-b5d024cfd788/volumes" Jun 06 10:07:15 crc kubenswrapper[4911]: I0606 10:07:15.760770 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5f958" podUID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" containerName="registry-server" containerID="cri-o://54f9d3c6d3c73f3e49f57a1d2f58aa19d36615e8032980a1634c2669b69bd885" gracePeriod=2 Jun 06 10:07:16 crc kubenswrapper[4911]: I0606 10:07:16.772945 4911 generic.go:334] "Generic (PLEG): container finished" podID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" containerID="54f9d3c6d3c73f3e49f57a1d2f58aa19d36615e8032980a1634c2669b69bd885" exitCode=0 Jun 06 10:07:16 crc kubenswrapper[4911]: I0606 10:07:16.773242 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5f958" event={"ID":"0a3096cf-3d45-4dd6-a45c-c1508d0bf869","Type":"ContainerDied","Data":"54f9d3c6d3c73f3e49f57a1d2f58aa19d36615e8032980a1634c2669b69bd885"} Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.145282 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.348899 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvwk6\" (UniqueName: \"kubernetes.io/projected/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-kube-api-access-zvwk6\") pod \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.349058 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-utilities\") pod \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.349247 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-catalog-content\") pod \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\" (UID: \"0a3096cf-3d45-4dd6-a45c-c1508d0bf869\") " Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.350148 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-utilities" (OuterVolumeSpecName: "utilities") pod "0a3096cf-3d45-4dd6-a45c-c1508d0bf869" (UID: "0a3096cf-3d45-4dd6-a45c-c1508d0bf869"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.368938 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-kube-api-access-zvwk6" (OuterVolumeSpecName: "kube-api-access-zvwk6") pod "0a3096cf-3d45-4dd6-a45c-c1508d0bf869" (UID: "0a3096cf-3d45-4dd6-a45c-c1508d0bf869"). InnerVolumeSpecName "kube-api-access-zvwk6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.412317 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0a3096cf-3d45-4dd6-a45c-c1508d0bf869" (UID: "0a3096cf-3d45-4dd6-a45c-c1508d0bf869"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.451450 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvwk6\" (UniqueName: \"kubernetes.io/projected/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-kube-api-access-zvwk6\") on node \"crc\" DevicePath \"\"" Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.451489 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.451504 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a3096cf-3d45-4dd6-a45c-c1508d0bf869-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.783601 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5f958" event={"ID":"0a3096cf-3d45-4dd6-a45c-c1508d0bf869","Type":"ContainerDied","Data":"f97866cd58c2a848686d29ed403827689111ab5988b7164e0a24516787c270da"} Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.783713 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5f958" Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.783719 4911 scope.go:117] "RemoveContainer" containerID="54f9d3c6d3c73f3e49f57a1d2f58aa19d36615e8032980a1634c2669b69bd885" Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.815708 4911 scope.go:117] "RemoveContainer" containerID="6e93c390adc4078fadf1e7bfafcef9fc527e9e7ca30daa5bd88670c248b7e47d" Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.817415 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5f958"] Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.825839 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5f958"] Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.841713 4911 scope.go:117] "RemoveContainer" containerID="590ae267b856a4516e3ba0b30c5964ed575661b3daaab4c97d860b1a510d0fa2" Jun 06 10:07:17 crc kubenswrapper[4911]: I0606 10:07:17.960558 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" path="/var/lib/kubelet/pods/0a3096cf-3d45-4dd6-a45c-c1508d0bf869/volumes" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.933728 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fjmd5"] Jun 06 10:07:40 crc kubenswrapper[4911]: E0606 10:07:40.934724 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" containerName="extract-content" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.934746 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" containerName="extract-content" Jun 06 10:07:40 crc 
kubenswrapper[4911]: E0606 10:07:40.934760 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4b0bcd3-501c-4105-bbdd-b5d024cfd788" containerName="container-00" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.934768 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4b0bcd3-501c-4105-bbdd-b5d024cfd788" containerName="container-00" Jun 06 10:07:40 crc kubenswrapper[4911]: E0606 10:07:40.934783 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" containerName="registry-server" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.934791 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" containerName="registry-server" Jun 06 10:07:40 crc kubenswrapper[4911]: E0606 10:07:40.934815 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" containerName="extract-utilities" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.934822 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" containerName="extract-utilities" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.935066 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4b0bcd3-501c-4105-bbdd-b5d024cfd788" containerName="container-00" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.935086 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a3096cf-3d45-4dd6-a45c-c1508d0bf869" containerName="registry-server" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.936846 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.940498 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-catalog-content\") pod \"certified-operators-fjmd5\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.940559 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-utilities\") pod \"certified-operators-fjmd5\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.940675 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhfw9\" (UniqueName: \"kubernetes.io/projected/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-kube-api-access-xhfw9\") pod \"certified-operators-fjmd5\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:40 crc kubenswrapper[4911]: I0606 10:07:40.944864 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fjmd5"] Jun 06 10:07:41 crc kubenswrapper[4911]: I0606 10:07:41.042241 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhfw9\" (UniqueName: \"kubernetes.io/projected/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-kube-api-access-xhfw9\") pod \"certified-operators-fjmd5\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " 
pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:41 crc kubenswrapper[4911]: I0606 10:07:41.042397 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-catalog-content\") pod \"certified-operators-fjmd5\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:41 crc kubenswrapper[4911]: I0606 10:07:41.042448 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-utilities\") pod \"certified-operators-fjmd5\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:41 crc kubenswrapper[4911]: I0606 10:07:41.043145 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-catalog-content\") pod \"certified-operators-fjmd5\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:41 crc kubenswrapper[4911]: I0606 10:07:41.043363 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-utilities\") pod \"certified-operators-fjmd5\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:41 crc kubenswrapper[4911]: I0606 10:07:41.065221 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhfw9\" (UniqueName: \"kubernetes.io/projected/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-kube-api-access-xhfw9\") pod \"certified-operators-fjmd5\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:41 crc kubenswrapper[4911]: I0606 10:07:41.261555 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:41 crc kubenswrapper[4911]: I0606 10:07:41.931468 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fjmd5"] Jun 06 10:07:41 crc kubenswrapper[4911]: I0606 10:07:41.997544 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fjmd5" event={"ID":"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15","Type":"ContainerStarted","Data":"0f381a50e703ea9d462a9cbd496e0a2d3c64e5a6ae671fdac74cb6f01dfc771b"} Jun 06 10:07:43 crc kubenswrapper[4911]: I0606 10:07:43.009381 4911 generic.go:334] "Generic (PLEG): container finished" podID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" containerID="14877ae6e9489e1b1564de1842dd2440a322f75dccb3273846f0f82886d48339" exitCode=0 Jun 06 10:07:43 crc kubenswrapper[4911]: I0606 10:07:43.009496 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fjmd5" event={"ID":"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15","Type":"ContainerDied","Data":"14877ae6e9489e1b1564de1842dd2440a322f75dccb3273846f0f82886d48339"} Jun 06 10:07:45 crc kubenswrapper[4911]: I0606 10:07:45.032927 4911 generic.go:334] "Generic (PLEG): container finished" podID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" containerID="9fea252b326f3de8da995b2cb684c5861e04b38043a12aa9cd25a19a3f4c87fc" exitCode=0 Jun 06 10:07:45 crc kubenswrapper[4911]: I0606 10:07:45.033001 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fjmd5" event={"ID":"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15","Type":"ContainerDied","Data":"9fea252b326f3de8da995b2cb684c5861e04b38043a12aa9cd25a19a3f4c87fc"} Jun 06 10:07:46 crc kubenswrapper[4911]: I0606 10:07:46.046121 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fjmd5" event={"ID":"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15","Type":"ContainerStarted","Data":"fc010892836f6281a0ca7e442dbae18a0dd3341119bbca2eb9e82752d39f3c25"} Jun 06 10:07:46 crc kubenswrapper[4911]: I0606 10:07:46.067700 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fjmd5" podStartSLOduration=3.66409194 podStartE2EDuration="6.067668156s" podCreationTimestamp="2025-06-06 10:07:40 +0000 UTC" firstStartedPulling="2025-06-06 10:07:43.012626104 +0000 UTC m=+3274.288051677" lastFinishedPulling="2025-06-06 10:07:45.41620235 +0000 UTC m=+3276.691627893" observedRunningTime="2025-06-06 10:07:46.061460725 +0000 UTC m=+3277.336886278" watchObservedRunningTime="2025-06-06 10:07:46.067668156 +0000 UTC m=+3277.343093719" Jun 06 10:07:51 crc kubenswrapper[4911]: I0606 10:07:51.262436 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:51 crc kubenswrapper[4911]: I0606 10:07:51.264391 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:51 crc kubenswrapper[4911]: I0606 10:07:51.317539 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:52 crc kubenswrapper[4911]: I0606 10:07:52.144675 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:52 crc kubenswrapper[4911]: I0606 10:07:52.192911 4911 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/certified-operators-fjmd5"] Jun 06 10:07:54 crc kubenswrapper[4911]: I0606 10:07:54.115468 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fjmd5" podUID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" containerName="registry-server" containerID="cri-o://fc010892836f6281a0ca7e442dbae18a0dd3341119bbca2eb9e82752d39f3c25" gracePeriod=2 Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.129263 4911 generic.go:334] "Generic (PLEG): container finished" podID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" containerID="fc010892836f6281a0ca7e442dbae18a0dd3341119bbca2eb9e82752d39f3c25" exitCode=0 Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.129340 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fjmd5" event={"ID":"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15","Type":"ContainerDied","Data":"fc010892836f6281a0ca7e442dbae18a0dd3341119bbca2eb9e82752d39f3c25"} Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.129972 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fjmd5" event={"ID":"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15","Type":"ContainerDied","Data":"0f381a50e703ea9d462a9cbd496e0a2d3c64e5a6ae671fdac74cb6f01dfc771b"} Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.129990 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f381a50e703ea9d462a9cbd496e0a2d3c64e5a6ae671fdac74cb6f01dfc771b" Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.212252 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.344006 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-catalog-content\") pod \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.344182 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhfw9\" (UniqueName: \"kubernetes.io/projected/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-kube-api-access-xhfw9\") pod \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.344232 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-utilities\") pod \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\" (UID: \"e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15\") " Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.345220 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-utilities" (OuterVolumeSpecName: "utilities") pod "e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" (UID: "e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.350208 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-kube-api-access-xhfw9" (OuterVolumeSpecName: "kube-api-access-xhfw9") pod "e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" (UID: "e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15"). InnerVolumeSpecName "kube-api-access-xhfw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.376731 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" (UID: "e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.446826 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhfw9\" (UniqueName: \"kubernetes.io/projected/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-kube-api-access-xhfw9\") on node \"crc\" DevicePath \"\"" Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.446862 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:07:55 crc kubenswrapper[4911]: I0606 10:07:55.446872 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:07:56 crc kubenswrapper[4911]: I0606 10:07:56.139868 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fjmd5" Jun 06 10:07:56 crc kubenswrapper[4911]: I0606 10:07:56.178128 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fjmd5"] Jun 06 10:07:56 crc kubenswrapper[4911]: I0606 10:07:56.188226 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fjmd5"] Jun 06 10:07:57 crc kubenswrapper[4911]: I0606 10:07:57.959666 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" path="/var/lib/kubelet/pods/e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15/volumes" Jun 06 10:08:01 crc kubenswrapper[4911]: I0606 10:08:01.757047 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-dt7wh"] Jun 06 10:08:01 crc kubenswrapper[4911]: E0606 10:08:01.757922 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" containerName="extract-utilities" Jun 06 10:08:01 crc kubenswrapper[4911]: I0606 10:08:01.757998 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" containerName="extract-utilities" Jun 06 10:08:01 crc kubenswrapper[4911]: E0606 10:08:01.758025 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" containerName="registry-server" Jun 06 10:08:01 crc kubenswrapper[4911]: I0606 10:08:01.758033 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" containerName="registry-server" Jun 06 10:08:01 crc kubenswrapper[4911]: E0606 10:08:01.758046 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" containerName="extract-content" Jun 06 10:08:01 crc kubenswrapper[4911]: I0606 10:08:01.758054 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" containerName="extract-content" Jun 06 10:08:01 crc kubenswrapper[4911]: I0606 10:08:01.758328 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9bbe54f-abd1-4a00-bd9e-2de4dfa31a15" containerName="registry-server" Jun 06 10:08:01 crc kubenswrapper[4911]: I0606 10:08:01.758941 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-dt7wh" Jun 06 10:08:01 crc kubenswrapper[4911]: I0606 10:08:01.885215 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkh8m\" (UniqueName: \"kubernetes.io/projected/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-kube-api-access-xkh8m\") pod \"crc-debug-dt7wh\" (UID: \"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c\") " pod="openstack/crc-debug-dt7wh" Jun 06 10:08:01 crc kubenswrapper[4911]: I0606 10:08:01.885287 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-host\") pod \"crc-debug-dt7wh\" (UID: \"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c\") " pod="openstack/crc-debug-dt7wh" Jun 06 10:08:01 crc kubenswrapper[4911]: I0606 10:08:01.987795 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkh8m\" (UniqueName: \"kubernetes.io/projected/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-kube-api-access-xkh8m\") pod \"crc-debug-dt7wh\" (UID: \"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c\") " pod="openstack/crc-debug-dt7wh" Jun 06 10:08:01 crc kubenswrapper[4911]: I0606 10:08:01.987839 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-host\") pod \"crc-debug-dt7wh\" (UID: \"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c\") " pod="openstack/crc-debug-dt7wh" Jun 06 10:08:01 crc kubenswrapper[4911]: I0606 10:08:01.987982 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-host\") pod \"crc-debug-dt7wh\" (UID: \"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c\") " pod="openstack/crc-debug-dt7wh" Jun 06 10:08:02 crc kubenswrapper[4911]: I0606 10:08:02.011107 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkh8m\" (UniqueName: \"kubernetes.io/projected/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-kube-api-access-xkh8m\") pod \"crc-debug-dt7wh\" (UID: \"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c\") " pod="openstack/crc-debug-dt7wh" Jun 06 10:08:02 crc kubenswrapper[4911]: I0606 10:08:02.081302 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-dt7wh" Jun 06 10:08:02 crc kubenswrapper[4911]: I0606 10:08:02.190695 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-dt7wh" event={"ID":"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c","Type":"ContainerStarted","Data":"8920709b56616e78023ba27559b23f007ce36a18c0bac363dbc75d5eaa2e3227"} Jun 06 10:08:03 crc kubenswrapper[4911]: I0606 10:08:03.200515 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-dt7wh" event={"ID":"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c","Type":"ContainerStarted","Data":"d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3"} Jun 06 10:08:03 crc kubenswrapper[4911]: I0606 10:08:03.219615 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-dt7wh" podStartSLOduration=2.219560568 podStartE2EDuration="2.219560568s" podCreationTimestamp="2025-06-06 10:08:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:08:03.213755427 +0000 UTC m=+3294.489180980" watchObservedRunningTime="2025-06-06 10:08:03.219560568 +0000 UTC m=+3294.494986131" Jun 06 10:08:12 crc kubenswrapper[4911]: I0606 10:08:12.594587 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-dt7wh"] Jun 06 10:08:12 crc kubenswrapper[4911]: I0606 10:08:12.595471 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-dt7wh" podUID="27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c" containerName="container-00" containerID="cri-o://d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3" gracePeriod=2 Jun 06 10:08:12 crc kubenswrapper[4911]: I0606 10:08:12.602379 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-dt7wh"] Jun 06 10:08:12 crc kubenswrapper[4911]: I0606 10:08:12.712398 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-dt7wh" Jun 06 10:08:12 crc kubenswrapper[4911]: I0606 10:08:12.798185 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkh8m\" (UniqueName: \"kubernetes.io/projected/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-kube-api-access-xkh8m\") pod \"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c\" (UID: \"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c\") " Jun 06 10:08:12 crc kubenswrapper[4911]: I0606 10:08:12.798540 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-host\") pod \"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c\" (UID: \"27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c\") " Jun 06 10:08:12 crc kubenswrapper[4911]: I0606 10:08:12.798683 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-host" (OuterVolumeSpecName: "host") pod "27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c" (UID: "27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:08:12 crc kubenswrapper[4911]: I0606 10:08:12.799067 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:08:12 crc kubenswrapper[4911]: I0606 10:08:12.803836 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-kube-api-access-xkh8m" (OuterVolumeSpecName: "kube-api-access-xkh8m") pod "27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c" (UID: "27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c"). InnerVolumeSpecName "kube-api-access-xkh8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:08:12 crc kubenswrapper[4911]: I0606 10:08:12.901432 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkh8m\" (UniqueName: \"kubernetes.io/projected/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c-kube-api-access-xkh8m\") on node \"crc\" DevicePath \"\"" Jun 06 10:08:13 crc kubenswrapper[4911]: I0606 10:08:13.300630 4911 generic.go:334] "Generic (PLEG): container finished" podID="27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c" containerID="d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3" exitCode=0 Jun 06 10:08:13 crc kubenswrapper[4911]: I0606 10:08:13.300695 4911 scope.go:117] "RemoveContainer" containerID="d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3" Jun 06 10:08:13 crc kubenswrapper[4911]: I0606 10:08:13.300710 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-dt7wh" Jun 06 10:08:13 crc kubenswrapper[4911]: I0606 10:08:13.323473 4911 scope.go:117] "RemoveContainer" containerID="d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3" Jun 06 10:08:13 crc kubenswrapper[4911]: E0606 10:08:13.323910 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3\": container with ID starting with d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3 not found: ID does not exist" containerID="d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3" Jun 06 10:08:13 crc kubenswrapper[4911]: I0606 10:08:13.323947 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3"} err="failed to get container status \"d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3\": rpc error: code = NotFound desc = could not find container \"d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3\": container with ID starting with d61eb72a088c73c235fd3319e595e6d46496eb50bac3e763594eedaa148af8f3 not found: ID does not exist" Jun 06 10:08:13 crc kubenswrapper[4911]: I0606 10:08:13.958727 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c" path="/var/lib/kubelet/pods/27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c/volumes" Jun 06 10:08:54 crc kubenswrapper[4911]: I0606 10:08:54.300251 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:08:54 crc kubenswrapper[4911]: I0606 
10:08:54.300828 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.034277 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-xdd5z"] Jun 06 10:09:02 crc kubenswrapper[4911]: E0606 10:09:02.035393 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c" containerName="container-00" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.035412 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c" containerName="container-00" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.035710 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="27bbb70c-cd3c-4eb6-95ac-fd99e7853d3c" containerName="container-00" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.041693 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-xdd5z" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.151324 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e63d67a1-1993-4171-bfba-01155cf75202-host\") pod \"crc-debug-xdd5z\" (UID: \"e63d67a1-1993-4171-bfba-01155cf75202\") " pod="openstack/crc-debug-xdd5z" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.151589 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6bn5\" (UniqueName: \"kubernetes.io/projected/e63d67a1-1993-4171-bfba-01155cf75202-kube-api-access-v6bn5\") pod \"crc-debug-xdd5z\" (UID: \"e63d67a1-1993-4171-bfba-01155cf75202\") " pod="openstack/crc-debug-xdd5z" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.253693 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e63d67a1-1993-4171-bfba-01155cf75202-host\") pod \"crc-debug-xdd5z\" (UID: \"e63d67a1-1993-4171-bfba-01155cf75202\") " pod="openstack/crc-debug-xdd5z" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.253827 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6bn5\" (UniqueName: \"kubernetes.io/projected/e63d67a1-1993-4171-bfba-01155cf75202-kube-api-access-v6bn5\") pod \"crc-debug-xdd5z\" (UID: \"e63d67a1-1993-4171-bfba-01155cf75202\") " pod="openstack/crc-debug-xdd5z" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.253864 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e63d67a1-1993-4171-bfba-01155cf75202-host\") pod \"crc-debug-xdd5z\" (UID: \"e63d67a1-1993-4171-bfba-01155cf75202\") " pod="openstack/crc-debug-xdd5z" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.271872 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6bn5\" (UniqueName: \"kubernetes.io/projected/e63d67a1-1993-4171-bfba-01155cf75202-kube-api-access-v6bn5\") pod \"crc-debug-xdd5z\" (UID: \"e63d67a1-1993-4171-bfba-01155cf75202\") " pod="openstack/crc-debug-xdd5z" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.362917 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-xdd5z" Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.759078 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-xdd5z" event={"ID":"e63d67a1-1993-4171-bfba-01155cf75202","Type":"ContainerStarted","Data":"73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9"} Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.759417 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-xdd5z" event={"ID":"e63d67a1-1993-4171-bfba-01155cf75202","Type":"ContainerStarted","Data":"5bf993bfaef0e27dc3da90400653853db6e62a7fdc6ea3137c724e1f8a8b443f"} Jun 06 10:09:02 crc kubenswrapper[4911]: I0606 10:09:02.775557 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-xdd5z" podStartSLOduration=0.775538424 podStartE2EDuration="775.538424ms" podCreationTimestamp="2025-06-06 10:09:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:09:02.770812451 +0000 UTC m=+3354.046238014" watchObservedRunningTime="2025-06-06 10:09:02.775538424 +0000 UTC m=+3354.050963967" Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.019964 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-xdd5z"] Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.020895 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-xdd5z" podUID="e63d67a1-1993-4171-bfba-01155cf75202" containerName="container-00" containerID="cri-o://73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9" gracePeriod=2 Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.034536 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-xdd5z"] Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.123810 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-xdd5z" Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.170474 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6bn5\" (UniqueName: \"kubernetes.io/projected/e63d67a1-1993-4171-bfba-01155cf75202-kube-api-access-v6bn5\") pod \"e63d67a1-1993-4171-bfba-01155cf75202\" (UID: \"e63d67a1-1993-4171-bfba-01155cf75202\") " Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.170763 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e63d67a1-1993-4171-bfba-01155cf75202-host\") pod \"e63d67a1-1993-4171-bfba-01155cf75202\" (UID: \"e63d67a1-1993-4171-bfba-01155cf75202\") " Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.170845 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e63d67a1-1993-4171-bfba-01155cf75202-host" (OuterVolumeSpecName: "host") pod "e63d67a1-1993-4171-bfba-01155cf75202" (UID: "e63d67a1-1993-4171-bfba-01155cf75202"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.171288 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e63d67a1-1993-4171-bfba-01155cf75202-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.191451 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e63d67a1-1993-4171-bfba-01155cf75202-kube-api-access-v6bn5" (OuterVolumeSpecName: "kube-api-access-v6bn5") pod "e63d67a1-1993-4171-bfba-01155cf75202" (UID: "e63d67a1-1993-4171-bfba-01155cf75202"). InnerVolumeSpecName "kube-api-access-v6bn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.273228 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6bn5\" (UniqueName: \"kubernetes.io/projected/e63d67a1-1993-4171-bfba-01155cf75202-kube-api-access-v6bn5\") on node \"crc\" DevicePath \"\"" Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.868045 4911 generic.go:334] "Generic (PLEG): container finished" podID="e63d67a1-1993-4171-bfba-01155cf75202" containerID="73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9" exitCode=0 Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.868144 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-xdd5z" Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.868151 4911 scope.go:117] "RemoveContainer" containerID="73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9" Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.902738 4911 scope.go:117] "RemoveContainer" containerID="73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9" Jun 06 10:09:13 crc kubenswrapper[4911]: E0606 10:09:13.904197 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9\": container with ID starting with 73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9 not found: ID does not exist" containerID="73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9" Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.904245 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9"} err="failed to get container status \"73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9\": rpc error: code = NotFound desc = could not find container \"73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9\": container with ID starting with 73a5262975cded4fbfb8b9d11a4db2c91fe3941cb8cdd4f50a6fce306e581ae9 not found: ID does not exist" Jun 06 10:09:13 crc kubenswrapper[4911]: I0606 10:09:13.973453 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e63d67a1-1993-4171-bfba-01155cf75202" path="/var/lib/kubelet/pods/e63d67a1-1993-4171-bfba-01155cf75202/volumes" Jun 06 10:09:24 crc kubenswrapper[4911]: I0606 10:09:24.300043 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:09:24 crc kubenswrapper[4911]: I0606 
10:09:24.300700 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:09:54 crc kubenswrapper[4911]: I0606 10:09:54.300124 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:09:54 crc kubenswrapper[4911]: I0606 10:09:54.300912 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:09:54 crc kubenswrapper[4911]: I0606 10:09:54.300958 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 10:09:54 crc kubenswrapper[4911]: I0606 10:09:54.301738 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5eaedf378a467801de1d31fb5b2d7f81648dcedca173908f5c9a7a95a8b2ce65"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 10:09:54 crc kubenswrapper[4911]: I0606 10:09:54.301795 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://5eaedf378a467801de1d31fb5b2d7f81648dcedca173908f5c9a7a95a8b2ce65" gracePeriod=600 Jun 06 10:09:55 crc kubenswrapper[4911]: I0606 10:09:55.249270 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="5eaedf378a467801de1d31fb5b2d7f81648dcedca173908f5c9a7a95a8b2ce65" exitCode=0 Jun 06 10:09:55 crc kubenswrapper[4911]: I0606 10:09:55.249348 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"5eaedf378a467801de1d31fb5b2d7f81648dcedca173908f5c9a7a95a8b2ce65"} Jun 06 10:09:55 crc kubenswrapper[4911]: I0606 10:09:55.250128 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb"} Jun 06 10:09:55 crc kubenswrapper[4911]: I0606 10:09:55.250137 4911 scope.go:117] "RemoveContainer" containerID="62db9cbac0df9b67c3a76f1d4292eb636ca824087418898418c781e7e5cf95ac" Jun 06 10:10:01 crc kubenswrapper[4911]: I0606 10:10:01.425467 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-7k5pr"] Jun 06 10:10:01 crc kubenswrapper[4911]: E0606 10:10:01.426439 4911 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e63d67a1-1993-4171-bfba-01155cf75202" containerName="container-00" Jun 06 10:10:01 crc kubenswrapper[4911]: I0606 10:10:01.426455 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e63d67a1-1993-4171-bfba-01155cf75202" containerName="container-00" Jun 06 10:10:01 crc kubenswrapper[4911]: I0606 10:10:01.426671 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e63d67a1-1993-4171-bfba-01155cf75202" containerName="container-00" Jun 06 10:10:01 crc kubenswrapper[4911]: I0606 10:10:01.427313 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-7k5pr" Jun 06 10:10:01 crc kubenswrapper[4911]: I0606 10:10:01.501876 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5nng\" (UniqueName: \"kubernetes.io/projected/a0774b37-2709-4b32-9744-116fc1f487f6-kube-api-access-k5nng\") pod \"crc-debug-7k5pr\" (UID: \"a0774b37-2709-4b32-9744-116fc1f487f6\") " pod="openstack/crc-debug-7k5pr" Jun 06 10:10:01 crc kubenswrapper[4911]: I0606 10:10:01.502726 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0774b37-2709-4b32-9744-116fc1f487f6-host\") pod \"crc-debug-7k5pr\" (UID: \"a0774b37-2709-4b32-9744-116fc1f487f6\") " pod="openstack/crc-debug-7k5pr" Jun 06 10:10:01 crc kubenswrapper[4911]: I0606 10:10:01.604796 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0774b37-2709-4b32-9744-116fc1f487f6-host\") pod \"crc-debug-7k5pr\" (UID: \"a0774b37-2709-4b32-9744-116fc1f487f6\") " pod="openstack/crc-debug-7k5pr" Jun 06 10:10:01 crc kubenswrapper[4911]: I0606 10:10:01.604941 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0774b37-2709-4b32-9744-116fc1f487f6-host\") pod \"crc-debug-7k5pr\" (UID: \"a0774b37-2709-4b32-9744-116fc1f487f6\") " pod="openstack/crc-debug-7k5pr" Jun 06 10:10:01 crc kubenswrapper[4911]: I0606 10:10:01.605316 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5nng\" (UniqueName: \"kubernetes.io/projected/a0774b37-2709-4b32-9744-116fc1f487f6-kube-api-access-k5nng\") pod \"crc-debug-7k5pr\" (UID: \"a0774b37-2709-4b32-9744-116fc1f487f6\") " pod="openstack/crc-debug-7k5pr" Jun 06 10:10:01 crc kubenswrapper[4911]: I0606 10:10:01.626718 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5nng\" (UniqueName: \"kubernetes.io/projected/a0774b37-2709-4b32-9744-116fc1f487f6-kube-api-access-k5nng\") pod \"crc-debug-7k5pr\" (UID: \"a0774b37-2709-4b32-9744-116fc1f487f6\") " pod="openstack/crc-debug-7k5pr" Jun 06 10:10:01 crc kubenswrapper[4911]: I0606 10:10:01.760585 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-7k5pr" Jun 06 10:10:01 crc kubenswrapper[4911]: W0606 10:10:01.805991 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0774b37_2709_4b32_9744_116fc1f487f6.slice/crio-082280f0b7e59ab15f9aa9b1d344f1294a769ce586a0c963a0e55ced3b11a5f9 WatchSource:0}: Error finding container 082280f0b7e59ab15f9aa9b1d344f1294a769ce586a0c963a0e55ced3b11a5f9: Status 404 returned error can't find the container with id 082280f0b7e59ab15f9aa9b1d344f1294a769ce586a0c963a0e55ced3b11a5f9 Jun 06 10:10:02 crc kubenswrapper[4911]: I0606 10:10:02.316431 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-7k5pr" event={"ID":"a0774b37-2709-4b32-9744-116fc1f487f6","Type":"ContainerStarted","Data":"1e55d751b1ea39d14f2e00aaf058c93d9ab99907c97346cca86e45c0a10a0c5d"} Jun 06 10:10:02 crc kubenswrapper[4911]: I0606 10:10:02.316681 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-7k5pr" event={"ID":"a0774b37-2709-4b32-9744-116fc1f487f6","Type":"ContainerStarted","Data":"082280f0b7e59ab15f9aa9b1d344f1294a769ce586a0c963a0e55ced3b11a5f9"} Jun 06 10:10:02 crc kubenswrapper[4911]: I0606 10:10:02.332324 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-7k5pr" podStartSLOduration=1.332309479 podStartE2EDuration="1.332309479s" podCreationTimestamp="2025-06-06 10:10:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:10:02.327900915 +0000 UTC m=+3413.603326498" watchObservedRunningTime="2025-06-06 10:10:02.332309479 +0000 UTC m=+3413.607735022" Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.366008 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-7k5pr"] Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.367013 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-7k5pr" podUID="a0774b37-2709-4b32-9744-116fc1f487f6" containerName="container-00" containerID="cri-o://1e55d751b1ea39d14f2e00aaf058c93d9ab99907c97346cca86e45c0a10a0c5d" gracePeriod=2 Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.392342 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-7k5pr"] Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.425455 4911 generic.go:334] "Generic (PLEG): container finished" podID="a0774b37-2709-4b32-9744-116fc1f487f6" containerID="1e55d751b1ea39d14f2e00aaf058c93d9ab99907c97346cca86e45c0a10a0c5d" exitCode=0 Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.425513 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="082280f0b7e59ab15f9aa9b1d344f1294a769ce586a0c963a0e55ced3b11a5f9" Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.503892 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-7k5pr" Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.650434 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5nng\" (UniqueName: \"kubernetes.io/projected/a0774b37-2709-4b32-9744-116fc1f487f6-kube-api-access-k5nng\") pod \"a0774b37-2709-4b32-9744-116fc1f487f6\" (UID: \"a0774b37-2709-4b32-9744-116fc1f487f6\") " Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.650721 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0774b37-2709-4b32-9744-116fc1f487f6-host\") pod \"a0774b37-2709-4b32-9744-116fc1f487f6\" (UID: \"a0774b37-2709-4b32-9744-116fc1f487f6\") " Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.650811 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a0774b37-2709-4b32-9744-116fc1f487f6-host" (OuterVolumeSpecName: "host") pod "a0774b37-2709-4b32-9744-116fc1f487f6" (UID: "a0774b37-2709-4b32-9744-116fc1f487f6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.651145 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0774b37-2709-4b32-9744-116fc1f487f6-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.656302 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0774b37-2709-4b32-9744-116fc1f487f6-kube-api-access-k5nng" (OuterVolumeSpecName: "kube-api-access-k5nng") pod "a0774b37-2709-4b32-9744-116fc1f487f6" (UID: "a0774b37-2709-4b32-9744-116fc1f487f6"). InnerVolumeSpecName "kube-api-access-k5nng". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:10:12 crc kubenswrapper[4911]: I0606 10:10:12.753580 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5nng\" (UniqueName: \"kubernetes.io/projected/a0774b37-2709-4b32-9744-116fc1f487f6-kube-api-access-k5nng\") on node \"crc\" DevicePath \"\"" Jun 06 10:10:13 crc kubenswrapper[4911]: I0606 10:10:13.434127 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-7k5pr" Jun 06 10:10:13 crc kubenswrapper[4911]: I0606 10:10:13.957910 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0774b37-2709-4b32-9744-116fc1f487f6" path="/var/lib/kubelet/pods/a0774b37-2709-4b32-9744-116fc1f487f6/volumes" Jun 06 10:10:43 crc kubenswrapper[4911]: I0606 10:10:43.993246 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-56lmd"] Jun 06 10:10:43 crc kubenswrapper[4911]: E0606 10:10:43.994217 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0774b37-2709-4b32-9744-116fc1f487f6" containerName="container-00" Jun 06 10:10:43 crc kubenswrapper[4911]: I0606 10:10:43.994230 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0774b37-2709-4b32-9744-116fc1f487f6" containerName="container-00" Jun 06 10:10:43 crc kubenswrapper[4911]: I0606 10:10:43.994436 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0774b37-2709-4b32-9744-116fc1f487f6" containerName="container-00" Jun 06 10:10:43 crc kubenswrapper[4911]: I0606 10:10:43.995847 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.003729 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-56lmd"] Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.109154 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw98w\" (UniqueName: \"kubernetes.io/projected/8c9009af-32d8-475a-a30c-a135d827a915-kube-api-access-fw98w\") pod \"redhat-marketplace-56lmd\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.109547 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-utilities\") pod \"redhat-marketplace-56lmd\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.109630 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-catalog-content\") pod \"redhat-marketplace-56lmd\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.211905 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw98w\" (UniqueName: \"kubernetes.io/projected/8c9009af-32d8-475a-a30c-a135d827a915-kube-api-access-fw98w\") pod \"redhat-marketplace-56lmd\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.211997 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-utilities\") pod \"redhat-marketplace-56lmd\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.212071 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-catalog-content\") pod \"redhat-marketplace-56lmd\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.212963 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-utilities\") pod \"redhat-marketplace-56lmd\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.213034 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-catalog-content\") pod \"redhat-marketplace-56lmd\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.230915 4911 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fw98w\" (UniqueName: \"kubernetes.io/projected/8c9009af-32d8-475a-a30c-a135d827a915-kube-api-access-fw98w\") pod \"redhat-marketplace-56lmd\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.318001 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:44 crc kubenswrapper[4911]: I0606 10:10:44.983019 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-56lmd"] Jun 06 10:10:45 crc kubenswrapper[4911]: I0606 10:10:45.814861 4911 generic.go:334] "Generic (PLEG): container finished" podID="8c9009af-32d8-475a-a30c-a135d827a915" containerID="0e3ec5bafb15413a874d4c49145185ee62c863cdf65e46b089411b2322701f9e" exitCode=0 Jun 06 10:10:45 crc kubenswrapper[4911]: I0606 10:10:45.815140 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-56lmd" event={"ID":"8c9009af-32d8-475a-a30c-a135d827a915","Type":"ContainerDied","Data":"0e3ec5bafb15413a874d4c49145185ee62c863cdf65e46b089411b2322701f9e"} Jun 06 10:10:45 crc kubenswrapper[4911]: I0606 10:10:45.815171 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-56lmd" event={"ID":"8c9009af-32d8-475a-a30c-a135d827a915","Type":"ContainerStarted","Data":"2b9ca651d75aa495f8bb557eff0b341fe270cecb72fe61eddde688d63564cdee"} Jun 06 10:10:45 crc kubenswrapper[4911]: I0606 10:10:45.818542 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 10:10:47 crc kubenswrapper[4911]: I0606 10:10:47.832225 4911 generic.go:334] "Generic (PLEG): container finished" podID="8c9009af-32d8-475a-a30c-a135d827a915" containerID="54302884f5e2ac616c3c482209e79e206ef12063338f462b79247870f80ce145" exitCode=0 Jun 06 10:10:47 crc kubenswrapper[4911]: I0606 10:10:47.832319 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-56lmd" event={"ID":"8c9009af-32d8-475a-a30c-a135d827a915","Type":"ContainerDied","Data":"54302884f5e2ac616c3c482209e79e206ef12063338f462b79247870f80ce145"} Jun 06 10:10:48 crc kubenswrapper[4911]: I0606 10:10:48.843410 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-56lmd" event={"ID":"8c9009af-32d8-475a-a30c-a135d827a915","Type":"ContainerStarted","Data":"9c8f803d89075de9e0f1bdbb68c34e98316e114644a1b51aa84de2ccc85899a4"} Jun 06 10:10:48 crc kubenswrapper[4911]: I0606 10:10:48.869159 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-56lmd" podStartSLOduration=3.440330185 podStartE2EDuration="5.869136216s" podCreationTimestamp="2025-06-06 10:10:43 +0000 UTC" firstStartedPulling="2025-06-06 10:10:45.818317286 +0000 UTC m=+3457.093742829" lastFinishedPulling="2025-06-06 10:10:48.247123317 +0000 UTC m=+3459.522548860" observedRunningTime="2025-06-06 10:10:48.862757221 +0000 UTC m=+3460.138182774" watchObservedRunningTime="2025-06-06 10:10:48.869136216 +0000 UTC m=+3460.144561759" Jun 06 10:10:54 crc kubenswrapper[4911]: I0606 10:10:54.318219 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:54 crc kubenswrapper[4911]: I0606 10:10:54.318800 4911 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:54 crc kubenswrapper[4911]: I0606 10:10:54.374254 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:54 crc kubenswrapper[4911]: I0606 10:10:54.979050 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:55 crc kubenswrapper[4911]: I0606 10:10:55.024324 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-56lmd"] Jun 06 10:10:56 crc kubenswrapper[4911]: I0606 10:10:56.960826 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-56lmd" podUID="8c9009af-32d8-475a-a30c-a135d827a915" containerName="registry-server" containerID="cri-o://9c8f803d89075de9e0f1bdbb68c34e98316e114644a1b51aa84de2ccc85899a4" gracePeriod=2 Jun 06 10:10:57 crc kubenswrapper[4911]: I0606 10:10:57.968836 4911 generic.go:334] "Generic (PLEG): container finished" podID="8c9009af-32d8-475a-a30c-a135d827a915" containerID="9c8f803d89075de9e0f1bdbb68c34e98316e114644a1b51aa84de2ccc85899a4" exitCode=0 Jun 06 10:10:57 crc kubenswrapper[4911]: I0606 10:10:57.968906 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-56lmd" event={"ID":"8c9009af-32d8-475a-a30c-a135d827a915","Type":"ContainerDied","Data":"9c8f803d89075de9e0f1bdbb68c34e98316e114644a1b51aa84de2ccc85899a4"} Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.133904 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.204676 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-utilities\") pod \"8c9009af-32d8-475a-a30c-a135d827a915\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.204811 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-catalog-content\") pod \"8c9009af-32d8-475a-a30c-a135d827a915\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.204973 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fw98w\" (UniqueName: \"kubernetes.io/projected/8c9009af-32d8-475a-a30c-a135d827a915-kube-api-access-fw98w\") pod \"8c9009af-32d8-475a-a30c-a135d827a915\" (UID: \"8c9009af-32d8-475a-a30c-a135d827a915\") " Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.205730 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-utilities" (OuterVolumeSpecName: "utilities") pod "8c9009af-32d8-475a-a30c-a135d827a915" (UID: "8c9009af-32d8-475a-a30c-a135d827a915"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.210460 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c9009af-32d8-475a-a30c-a135d827a915-kube-api-access-fw98w" (OuterVolumeSpecName: "kube-api-access-fw98w") pod "8c9009af-32d8-475a-a30c-a135d827a915" (UID: "8c9009af-32d8-475a-a30c-a135d827a915"). InnerVolumeSpecName "kube-api-access-fw98w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.217078 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c9009af-32d8-475a-a30c-a135d827a915" (UID: "8c9009af-32d8-475a-a30c-a135d827a915"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.306817 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.306860 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fw98w\" (UniqueName: \"kubernetes.io/projected/8c9009af-32d8-475a-a30c-a135d827a915-kube-api-access-fw98w\") on node \"crc\" DevicePath \"\"" Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.306873 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9009af-32d8-475a-a30c-a135d827a915-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.978987 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-56lmd" event={"ID":"8c9009af-32d8-475a-a30c-a135d827a915","Type":"ContainerDied","Data":"2b9ca651d75aa495f8bb557eff0b341fe270cecb72fe61eddde688d63564cdee"} Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.979398 4911 scope.go:117] "RemoveContainer" containerID="9c8f803d89075de9e0f1bdbb68c34e98316e114644a1b51aa84de2ccc85899a4" Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.979077 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-56lmd" Jun 06 10:10:58 crc kubenswrapper[4911]: I0606 10:10:58.999712 4911 scope.go:117] "RemoveContainer" containerID="54302884f5e2ac616c3c482209e79e206ef12063338f462b79247870f80ce145" Jun 06 10:10:59 crc kubenswrapper[4911]: I0606 10:10:59.013621 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-56lmd"] Jun 06 10:10:59 crc kubenswrapper[4911]: I0606 10:10:59.027912 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-56lmd"] Jun 06 10:10:59 crc kubenswrapper[4911]: I0606 10:10:59.037221 4911 scope.go:117] "RemoveContainer" containerID="0e3ec5bafb15413a874d4c49145185ee62c863cdf65e46b089411b2322701f9e" Jun 06 10:10:59 crc kubenswrapper[4911]: I0606 10:10:59.959583 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c9009af-32d8-475a-a30c-a135d827a915" path="/var/lib/kubelet/pods/8c9009af-32d8-475a-a30c-a135d827a915/volumes" Jun 06 10:11:01 crc kubenswrapper[4911]: I0606 10:11:01.790486 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-94qgr"] Jun 06 10:11:01 crc kubenswrapper[4911]: E0606 10:11:01.791231 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c9009af-32d8-475a-a30c-a135d827a915" containerName="registry-server" Jun 06 10:11:01 crc kubenswrapper[4911]: I0606 10:11:01.791247 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c9009af-32d8-475a-a30c-a135d827a915" containerName="registry-server" Jun 06 10:11:01 crc kubenswrapper[4911]: E0606 10:11:01.791286 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c9009af-32d8-475a-a30c-a135d827a915" containerName="extract-content" Jun 06 10:11:01 crc kubenswrapper[4911]: I0606 10:11:01.791293 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c9009af-32d8-475a-a30c-a135d827a915" containerName="extract-content" Jun 06 10:11:01 crc kubenswrapper[4911]: E0606 10:11:01.791313 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c9009af-32d8-475a-a30c-a135d827a915" containerName="extract-utilities" Jun 06 10:11:01 crc kubenswrapper[4911]: I0606 10:11:01.791345 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c9009af-32d8-475a-a30c-a135d827a915" containerName="extract-utilities" Jun 06 10:11:01 crc kubenswrapper[4911]: I0606 10:11:01.791582 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c9009af-32d8-475a-a30c-a135d827a915" containerName="registry-server" Jun 06 10:11:01 crc kubenswrapper[4911]: I0606 10:11:01.792364 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-94qgr" Jun 06 10:11:01 crc kubenswrapper[4911]: I0606 10:11:01.872806 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqd9j\" (UniqueName: \"kubernetes.io/projected/ae015cad-f99d-4c94-8874-42d041b8cb96-kube-api-access-cqd9j\") pod \"crc-debug-94qgr\" (UID: \"ae015cad-f99d-4c94-8874-42d041b8cb96\") " pod="openstack/crc-debug-94qgr" Jun 06 10:11:01 crc kubenswrapper[4911]: I0606 10:11:01.873149 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ae015cad-f99d-4c94-8874-42d041b8cb96-host\") pod \"crc-debug-94qgr\" (UID: \"ae015cad-f99d-4c94-8874-42d041b8cb96\") " pod="openstack/crc-debug-94qgr" Jun 06 10:11:01 crc kubenswrapper[4911]: I0606 10:11:01.975471 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqd9j\" (UniqueName: \"kubernetes.io/projected/ae015cad-f99d-4c94-8874-42d041b8cb96-kube-api-access-cqd9j\") pod \"crc-debug-94qgr\" (UID: \"ae015cad-f99d-4c94-8874-42d041b8cb96\") " pod="openstack/crc-debug-94qgr" Jun 06 10:11:01 crc kubenswrapper[4911]: I0606 10:11:01.975601 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ae015cad-f99d-4c94-8874-42d041b8cb96-host\") pod \"crc-debug-94qgr\" (UID: \"ae015cad-f99d-4c94-8874-42d041b8cb96\") " pod="openstack/crc-debug-94qgr" Jun 06 10:11:01 crc kubenswrapper[4911]: I0606 10:11:01.975748 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ae015cad-f99d-4c94-8874-42d041b8cb96-host\") pod \"crc-debug-94qgr\" (UID: \"ae015cad-f99d-4c94-8874-42d041b8cb96\") " pod="openstack/crc-debug-94qgr" Jun 06 10:11:02 crc kubenswrapper[4911]: I0606 10:11:02.003135 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqd9j\" (UniqueName: \"kubernetes.io/projected/ae015cad-f99d-4c94-8874-42d041b8cb96-kube-api-access-cqd9j\") pod \"crc-debug-94qgr\" (UID: \"ae015cad-f99d-4c94-8874-42d041b8cb96\") " pod="openstack/crc-debug-94qgr" Jun 06 10:11:02 crc kubenswrapper[4911]: I0606 10:11:02.110945 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-94qgr" Jun 06 10:11:03 crc kubenswrapper[4911]: I0606 10:11:03.025625 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-94qgr" event={"ID":"ae015cad-f99d-4c94-8874-42d041b8cb96","Type":"ContainerStarted","Data":"9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5"} Jun 06 10:11:03 crc kubenswrapper[4911]: I0606 10:11:03.026223 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-94qgr" event={"ID":"ae015cad-f99d-4c94-8874-42d041b8cb96","Type":"ContainerStarted","Data":"5b9dd9957b194ac68d9e91a5befacb780858aaaf73558a254887d4979b9c5547"} Jun 06 10:11:03 crc kubenswrapper[4911]: I0606 10:11:03.048295 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-94qgr" podStartSLOduration=2.048276306 podStartE2EDuration="2.048276306s" podCreationTimestamp="2025-06-06 10:11:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:11:03.042733092 +0000 UTC m=+3474.318158645" watchObservedRunningTime="2025-06-06 10:11:03.048276306 +0000 UTC m=+3474.323701849" Jun 06 10:11:12 crc kubenswrapper[4911]: I0606 10:11:12.705743 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-94qgr"] Jun 06 10:11:12 crc kubenswrapper[4911]: I0606 10:11:12.706491 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-94qgr" podUID="ae015cad-f99d-4c94-8874-42d041b8cb96" containerName="container-00" containerID="cri-o://9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5" gracePeriod=2 Jun 06 10:11:12 crc kubenswrapper[4911]: I0606 10:11:12.712744 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-94qgr"] Jun 06 10:11:12 crc kubenswrapper[4911]: E0606 10:11:12.808492 4911 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae015cad_f99d_4c94_8874_42d041b8cb96.slice/crio-conmon-9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5.scope\": RecentStats: unable to find data in memory cache]" Jun 06 10:11:12 crc kubenswrapper[4911]: I0606 10:11:12.809440 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-94qgr" Jun 06 10:11:12 crc kubenswrapper[4911]: I0606 10:11:12.918456 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ae015cad-f99d-4c94-8874-42d041b8cb96-host\") pod \"ae015cad-f99d-4c94-8874-42d041b8cb96\" (UID: \"ae015cad-f99d-4c94-8874-42d041b8cb96\") " Jun 06 10:11:12 crc kubenswrapper[4911]: I0606 10:11:12.918571 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae015cad-f99d-4c94-8874-42d041b8cb96-host" (OuterVolumeSpecName: "host") pod "ae015cad-f99d-4c94-8874-42d041b8cb96" (UID: "ae015cad-f99d-4c94-8874-42d041b8cb96"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:11:12 crc kubenswrapper[4911]: I0606 10:11:12.918701 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqd9j\" (UniqueName: \"kubernetes.io/projected/ae015cad-f99d-4c94-8874-42d041b8cb96-kube-api-access-cqd9j\") pod \"ae015cad-f99d-4c94-8874-42d041b8cb96\" (UID: \"ae015cad-f99d-4c94-8874-42d041b8cb96\") " Jun 06 10:11:12 crc kubenswrapper[4911]: I0606 10:11:12.919282 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ae015cad-f99d-4c94-8874-42d041b8cb96-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:11:12 crc kubenswrapper[4911]: I0606 10:11:12.926475 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae015cad-f99d-4c94-8874-42d041b8cb96-kube-api-access-cqd9j" (OuterVolumeSpecName: "kube-api-access-cqd9j") pod "ae015cad-f99d-4c94-8874-42d041b8cb96" (UID: "ae015cad-f99d-4c94-8874-42d041b8cb96"). InnerVolumeSpecName "kube-api-access-cqd9j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:11:13 crc kubenswrapper[4911]: I0606 10:11:13.021632 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqd9j\" (UniqueName: \"kubernetes.io/projected/ae015cad-f99d-4c94-8874-42d041b8cb96-kube-api-access-cqd9j\") on node \"crc\" DevicePath \"\"" Jun 06 10:11:13 crc kubenswrapper[4911]: I0606 10:11:13.119679 4911 generic.go:334] "Generic (PLEG): container finished" podID="ae015cad-f99d-4c94-8874-42d041b8cb96" containerID="9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5" exitCode=0 Jun 06 10:11:13 crc kubenswrapper[4911]: I0606 10:11:13.119789 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-94qgr" Jun 06 10:11:13 crc kubenswrapper[4911]: I0606 10:11:13.119751 4911 scope.go:117] "RemoveContainer" containerID="9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5" Jun 06 10:11:13 crc kubenswrapper[4911]: I0606 10:11:13.142405 4911 scope.go:117] "RemoveContainer" containerID="9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5" Jun 06 10:11:13 crc kubenswrapper[4911]: E0606 10:11:13.142906 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5\": container with ID starting with 9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5 not found: ID does not exist" containerID="9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5" Jun 06 10:11:13 crc kubenswrapper[4911]: I0606 10:11:13.142958 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5"} err="failed to get container status \"9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5\": rpc error: code = NotFound desc = could not find container \"9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5\": container with ID starting with 9519e0f0791037c8940a9faa54b0157d5272b7fe1e5f3dee6fd335ec209be7e5 not found: ID does not exist" Jun 06 10:11:13 crc kubenswrapper[4911]: I0606 10:11:13.960662 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae015cad-f99d-4c94-8874-42d041b8cb96" path="/var/lib/kubelet/pods/ae015cad-f99d-4c94-8874-42d041b8cb96/volumes" Jun 06 10:11:21 crc kubenswrapper[4911]: I0606 10:11:21.598427 4911 scope.go:117] "RemoveContainer" containerID="7f10344f140542a15c91d7478c866f9007fc11a8091f92dd205d77f970150d02" Jun 06 10:11:54 crc kubenswrapper[4911]: I0606 10:11:54.300769 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:11:54 crc kubenswrapper[4911]: I0606 10:11:54.301237 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.126851 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-x5pnc"] Jun 06 10:12:02 crc kubenswrapper[4911]: E0606 10:12:02.127953 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae015cad-f99d-4c94-8874-42d041b8cb96" containerName="container-00" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.127970 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae015cad-f99d-4c94-8874-42d041b8cb96" containerName="container-00" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.128195 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae015cad-f99d-4c94-8874-42d041b8cb96" containerName="container-00" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.129000 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-x5pnc" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.292418 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtlkx\" (UniqueName: \"kubernetes.io/projected/bdfae817-6a52-46b9-b371-0f0bd1a7701d-kube-api-access-gtlkx\") pod \"crc-debug-x5pnc\" (UID: \"bdfae817-6a52-46b9-b371-0f0bd1a7701d\") " pod="openstack/crc-debug-x5pnc" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.292619 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bdfae817-6a52-46b9-b371-0f0bd1a7701d-host\") pod \"crc-debug-x5pnc\" (UID: \"bdfae817-6a52-46b9-b371-0f0bd1a7701d\") " pod="openstack/crc-debug-x5pnc" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.394024 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bdfae817-6a52-46b9-b371-0f0bd1a7701d-host\") pod \"crc-debug-x5pnc\" (UID: \"bdfae817-6a52-46b9-b371-0f0bd1a7701d\") " pod="openstack/crc-debug-x5pnc" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.394166 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bdfae817-6a52-46b9-b371-0f0bd1a7701d-host\") pod \"crc-debug-x5pnc\" (UID: \"bdfae817-6a52-46b9-b371-0f0bd1a7701d\") " pod="openstack/crc-debug-x5pnc" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.394185 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtlkx\" (UniqueName: \"kubernetes.io/projected/bdfae817-6a52-46b9-b371-0f0bd1a7701d-kube-api-access-gtlkx\") pod \"crc-debug-x5pnc\" (UID: \"bdfae817-6a52-46b9-b371-0f0bd1a7701d\") " pod="openstack/crc-debug-x5pnc" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.414298 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtlkx\" (UniqueName: \"kubernetes.io/projected/bdfae817-6a52-46b9-b371-0f0bd1a7701d-kube-api-access-gtlkx\") pod \"crc-debug-x5pnc\" (UID: \"bdfae817-6a52-46b9-b371-0f0bd1a7701d\") " pod="openstack/crc-debug-x5pnc" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.457840 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-x5pnc" Jun 06 10:12:02 crc kubenswrapper[4911]: I0606 10:12:02.570509 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-x5pnc" event={"ID":"bdfae817-6a52-46b9-b371-0f0bd1a7701d","Type":"ContainerStarted","Data":"3fe497776cdb9534ec4304489a635aedaec8b9a01f42ce1f863dfeb6b8e3598f"} Jun 06 10:12:03 crc kubenswrapper[4911]: I0606 10:12:03.580584 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-x5pnc" event={"ID":"bdfae817-6a52-46b9-b371-0f0bd1a7701d","Type":"ContainerStarted","Data":"39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8"} Jun 06 10:12:03 crc kubenswrapper[4911]: I0606 10:12:03.597860 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-x5pnc" podStartSLOduration=1.5978434350000001 podStartE2EDuration="1.597843435s" podCreationTimestamp="2025-06-06 10:12:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:12:03.591995044 +0000 UTC m=+3534.867420587" watchObservedRunningTime="2025-06-06 10:12:03.597843435 +0000 UTC m=+3534.873268968" Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.091033 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-x5pnc"] Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.091867 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-x5pnc" podUID="bdfae817-6a52-46b9-b371-0f0bd1a7701d" containerName="container-00" containerID="cri-o://39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8" gracePeriod=2 Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.101459 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-x5pnc"] Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.203419 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-x5pnc" Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.312995 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtlkx\" (UniqueName: \"kubernetes.io/projected/bdfae817-6a52-46b9-b371-0f0bd1a7701d-kube-api-access-gtlkx\") pod \"bdfae817-6a52-46b9-b371-0f0bd1a7701d\" (UID: \"bdfae817-6a52-46b9-b371-0f0bd1a7701d\") " Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.313160 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bdfae817-6a52-46b9-b371-0f0bd1a7701d-host\") pod \"bdfae817-6a52-46b9-b371-0f0bd1a7701d\" (UID: \"bdfae817-6a52-46b9-b371-0f0bd1a7701d\") " Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.313243 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bdfae817-6a52-46b9-b371-0f0bd1a7701d-host" (OuterVolumeSpecName: "host") pod "bdfae817-6a52-46b9-b371-0f0bd1a7701d" (UID: "bdfae817-6a52-46b9-b371-0f0bd1a7701d"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.313705 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bdfae817-6a52-46b9-b371-0f0bd1a7701d-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.319310 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdfae817-6a52-46b9-b371-0f0bd1a7701d-kube-api-access-gtlkx" (OuterVolumeSpecName: "kube-api-access-gtlkx") pod "bdfae817-6a52-46b9-b371-0f0bd1a7701d" (UID: "bdfae817-6a52-46b9-b371-0f0bd1a7701d"). InnerVolumeSpecName "kube-api-access-gtlkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.416192 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtlkx\" (UniqueName: \"kubernetes.io/projected/bdfae817-6a52-46b9-b371-0f0bd1a7701d-kube-api-access-gtlkx\") on node \"crc\" DevicePath \"\"" Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.677076 4911 generic.go:334] "Generic (PLEG): container finished" podID="bdfae817-6a52-46b9-b371-0f0bd1a7701d" containerID="39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8" exitCode=0 Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.677189 4911 scope.go:117] "RemoveContainer" containerID="39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8" Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.677365 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-x5pnc" Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.716999 4911 scope.go:117] "RemoveContainer" containerID="39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8" Jun 06 10:12:13 crc kubenswrapper[4911]: E0606 10:12:13.717453 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8\": container with ID starting with 39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8 not found: ID does not exist" containerID="39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8" Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.717486 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8"} err="failed to get container status \"39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8\": rpc error: code = NotFound desc = could not find container \"39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8\": container with ID starting with 39d15e073edfa03e730395f43bf8e5520ca45a9502bce48ac1e419531a6632c8 not found: ID does not exist" Jun 06 10:12:13 crc kubenswrapper[4911]: I0606 10:12:13.962306 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdfae817-6a52-46b9-b371-0f0bd1a7701d" path="/var/lib/kubelet/pods/bdfae817-6a52-46b9-b371-0f0bd1a7701d/volumes" Jun 06 10:12:24 crc kubenswrapper[4911]: I0606 10:12:24.301647 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:12:24 crc kubenswrapper[4911]: I0606 
10:12:24.302234 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:12:54 crc kubenswrapper[4911]: I0606 10:12:54.300564 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:12:54 crc kubenswrapper[4911]: I0606 10:12:54.301176 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:12:54 crc kubenswrapper[4911]: I0606 10:12:54.301226 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 10:12:54 crc kubenswrapper[4911]: I0606 10:12:54.302115 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 10:12:54 crc kubenswrapper[4911]: I0606 10:12:54.302178 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" gracePeriod=600 Jun 06 10:12:54 crc kubenswrapper[4911]: E0606 10:12:54.424126 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:12:55 crc kubenswrapper[4911]: I0606 10:12:55.074501 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" exitCode=0 Jun 06 10:12:55 crc kubenswrapper[4911]: I0606 10:12:55.074614 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb"} Jun 06 10:12:55 crc kubenswrapper[4911]: I0606 10:12:55.074971 4911 scope.go:117] "RemoveContainer" containerID="5eaedf378a467801de1d31fb5b2d7f81648dcedca173908f5c9a7a95a8b2ce65" Jun 06 10:12:55 crc kubenswrapper[4911]: I0606 10:12:55.076211 4911 scope.go:117] "RemoveContainer" 
containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:12:55 crc kubenswrapper[4911]: E0606 10:12:55.076804 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:13:01 crc kubenswrapper[4911]: I0606 10:13:01.411841 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-mznd9"] Jun 06 10:13:01 crc kubenswrapper[4911]: E0606 10:13:01.412982 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdfae817-6a52-46b9-b371-0f0bd1a7701d" containerName="container-00" Jun 06 10:13:01 crc kubenswrapper[4911]: I0606 10:13:01.413001 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdfae817-6a52-46b9-b371-0f0bd1a7701d" containerName="container-00" Jun 06 10:13:01 crc kubenswrapper[4911]: I0606 10:13:01.413283 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdfae817-6a52-46b9-b371-0f0bd1a7701d" containerName="container-00" Jun 06 10:13:01 crc kubenswrapper[4911]: I0606 10:13:01.414031 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-mznd9" Jun 06 10:13:01 crc kubenswrapper[4911]: I0606 10:13:01.526334 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ae946fc1-b2ca-451e-8490-42d79a131773-host\") pod \"crc-debug-mznd9\" (UID: \"ae946fc1-b2ca-451e-8490-42d79a131773\") " pod="openstack/crc-debug-mznd9" Jun 06 10:13:01 crc kubenswrapper[4911]: I0606 10:13:01.526432 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6qgr\" (UniqueName: \"kubernetes.io/projected/ae946fc1-b2ca-451e-8490-42d79a131773-kube-api-access-k6qgr\") pod \"crc-debug-mznd9\" (UID: \"ae946fc1-b2ca-451e-8490-42d79a131773\") " pod="openstack/crc-debug-mznd9" Jun 06 10:13:01 crc kubenswrapper[4911]: I0606 10:13:01.628467 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6qgr\" (UniqueName: \"kubernetes.io/projected/ae946fc1-b2ca-451e-8490-42d79a131773-kube-api-access-k6qgr\") pod \"crc-debug-mznd9\" (UID: \"ae946fc1-b2ca-451e-8490-42d79a131773\") " pod="openstack/crc-debug-mznd9" Jun 06 10:13:01 crc kubenswrapper[4911]: I0606 10:13:01.628672 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ae946fc1-b2ca-451e-8490-42d79a131773-host\") pod \"crc-debug-mznd9\" (UID: \"ae946fc1-b2ca-451e-8490-42d79a131773\") " pod="openstack/crc-debug-mznd9" Jun 06 10:13:01 crc kubenswrapper[4911]: I0606 10:13:01.628744 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ae946fc1-b2ca-451e-8490-42d79a131773-host\") pod \"crc-debug-mznd9\" (UID: \"ae946fc1-b2ca-451e-8490-42d79a131773\") " pod="openstack/crc-debug-mznd9" Jun 06 10:13:01 crc kubenswrapper[4911]: I0606 10:13:01.647190 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6qgr\" (UniqueName: 
\"kubernetes.io/projected/ae946fc1-b2ca-451e-8490-42d79a131773-kube-api-access-k6qgr\") pod \"crc-debug-mznd9\" (UID: \"ae946fc1-b2ca-451e-8490-42d79a131773\") " pod="openstack/crc-debug-mznd9" Jun 06 10:13:01 crc kubenswrapper[4911]: I0606 10:13:01.736793 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-mznd9" Jun 06 10:13:02 crc kubenswrapper[4911]: I0606 10:13:02.141877 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mznd9" event={"ID":"ae946fc1-b2ca-451e-8490-42d79a131773","Type":"ContainerStarted","Data":"81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130"} Jun 06 10:13:02 crc kubenswrapper[4911]: I0606 10:13:02.142322 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mznd9" event={"ID":"ae946fc1-b2ca-451e-8490-42d79a131773","Type":"ContainerStarted","Data":"6e08136eb8287e06b62de3396f778e454b4a2ee009e46374eee9e07159f53dcb"} Jun 06 10:13:02 crc kubenswrapper[4911]: I0606 10:13:02.159267 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-mznd9" podStartSLOduration=1.159241829 podStartE2EDuration="1.159241829s" podCreationTimestamp="2025-06-06 10:13:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:13:02.155207404 +0000 UTC m=+3593.430632968" watchObservedRunningTime="2025-06-06 10:13:02.159241829 +0000 UTC m=+3593.434667372" Jun 06 10:13:05 crc kubenswrapper[4911]: I0606 10:13:05.948753 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:13:05 crc kubenswrapper[4911]: E0606 10:13:05.949666 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:13:12 crc kubenswrapper[4911]: I0606 10:13:12.329700 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-mznd9"] Jun 06 10:13:12 crc kubenswrapper[4911]: I0606 10:13:12.330516 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-mznd9" podUID="ae946fc1-b2ca-451e-8490-42d79a131773" containerName="container-00" containerID="cri-o://81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130" gracePeriod=2 Jun 06 10:13:12 crc kubenswrapper[4911]: I0606 10:13:12.339626 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-mznd9"] Jun 06 10:13:12 crc kubenswrapper[4911]: I0606 10:13:12.437043 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mznd9" Jun 06 10:13:12 crc kubenswrapper[4911]: I0606 10:13:12.545921 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ae946fc1-b2ca-451e-8490-42d79a131773-host\") pod \"ae946fc1-b2ca-451e-8490-42d79a131773\" (UID: \"ae946fc1-b2ca-451e-8490-42d79a131773\") " Jun 06 10:13:12 crc kubenswrapper[4911]: I0606 10:13:12.545988 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6qgr\" (UniqueName: \"kubernetes.io/projected/ae946fc1-b2ca-451e-8490-42d79a131773-kube-api-access-k6qgr\") pod \"ae946fc1-b2ca-451e-8490-42d79a131773\" (UID: \"ae946fc1-b2ca-451e-8490-42d79a131773\") " Jun 06 10:13:12 crc kubenswrapper[4911]: I0606 10:13:12.546083 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae946fc1-b2ca-451e-8490-42d79a131773-host" (OuterVolumeSpecName: "host") pod "ae946fc1-b2ca-451e-8490-42d79a131773" (UID: "ae946fc1-b2ca-451e-8490-42d79a131773"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:13:12 crc kubenswrapper[4911]: I0606 10:13:12.546745 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ae946fc1-b2ca-451e-8490-42d79a131773-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:13:12 crc kubenswrapper[4911]: I0606 10:13:12.552605 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae946fc1-b2ca-451e-8490-42d79a131773-kube-api-access-k6qgr" (OuterVolumeSpecName: "kube-api-access-k6qgr") pod "ae946fc1-b2ca-451e-8490-42d79a131773" (UID: "ae946fc1-b2ca-451e-8490-42d79a131773"). InnerVolumeSpecName "kube-api-access-k6qgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:13:12 crc kubenswrapper[4911]: I0606 10:13:12.648270 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6qgr\" (UniqueName: \"kubernetes.io/projected/ae946fc1-b2ca-451e-8490-42d79a131773-kube-api-access-k6qgr\") on node \"crc\" DevicePath \"\"" Jun 06 10:13:13 crc kubenswrapper[4911]: I0606 10:13:13.247666 4911 generic.go:334] "Generic (PLEG): container finished" podID="ae946fc1-b2ca-451e-8490-42d79a131773" containerID="81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130" exitCode=0 Jun 06 10:13:13 crc kubenswrapper[4911]: I0606 10:13:13.247717 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mznd9" Jun 06 10:13:13 crc kubenswrapper[4911]: I0606 10:13:13.247735 4911 scope.go:117] "RemoveContainer" containerID="81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130" Jun 06 10:13:13 crc kubenswrapper[4911]: I0606 10:13:13.283348 4911 scope.go:117] "RemoveContainer" containerID="81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130" Jun 06 10:13:13 crc kubenswrapper[4911]: E0606 10:13:13.285040 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130\": container with ID starting with 81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130 not found: ID does not exist" containerID="81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130" Jun 06 10:13:13 crc kubenswrapper[4911]: I0606 10:13:13.285082 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130"} err="failed to get container status \"81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130\": rpc error: code = NotFound desc = could not find container \"81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130\": container with ID starting with 81a8d43d9cbe4075ba97135130828a100b8d0fadbae5051b2c553d9c14886130 not found: ID does not exist" Jun 06 10:13:13 crc kubenswrapper[4911]: I0606 10:13:13.958923 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae946fc1-b2ca-451e-8490-42d79a131773" path="/var/lib/kubelet/pods/ae946fc1-b2ca-451e-8490-42d79a131773/volumes" Jun 06 10:13:17 crc kubenswrapper[4911]: I0606 10:13:17.948590 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:13:17 crc kubenswrapper[4911]: E0606 10:13:17.949367 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:13:31 crc kubenswrapper[4911]: I0606 10:13:31.947570 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:13:31 crc kubenswrapper[4911]: E0606 10:13:31.948387 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:13:46 crc kubenswrapper[4911]: I0606 10:13:46.948384 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:13:46 crc kubenswrapper[4911]: E0606 10:13:46.949165 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:13:58 crc kubenswrapper[4911]: I0606 10:13:58.947724 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:13:58 crc kubenswrapper[4911]: E0606 10:13:58.948449 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:14:01 crc kubenswrapper[4911]: I0606 10:14:01.695508 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-j4rw8"] Jun 06 10:14:01 crc kubenswrapper[4911]: E0606 10:14:01.696170 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae946fc1-b2ca-451e-8490-42d79a131773" containerName="container-00" Jun 06 10:14:01 crc kubenswrapper[4911]: I0606 10:14:01.696184 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae946fc1-b2ca-451e-8490-42d79a131773" containerName="container-00" Jun 06 10:14:01 crc kubenswrapper[4911]: I0606 10:14:01.696378 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae946fc1-b2ca-451e-8490-42d79a131773" containerName="container-00" Jun 06 10:14:01 crc kubenswrapper[4911]: I0606 10:14:01.697003 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-j4rw8" Jun 06 10:14:01 crc kubenswrapper[4911]: I0606 10:14:01.867934 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f8941396-b2c6-4464-97b2-e21efc031184-host\") pod \"crc-debug-j4rw8\" (UID: \"f8941396-b2c6-4464-97b2-e21efc031184\") " pod="openstack/crc-debug-j4rw8" Jun 06 10:14:01 crc kubenswrapper[4911]: I0606 10:14:01.868233 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxsbx\" (UniqueName: \"kubernetes.io/projected/f8941396-b2c6-4464-97b2-e21efc031184-kube-api-access-hxsbx\") pod \"crc-debug-j4rw8\" (UID: \"f8941396-b2c6-4464-97b2-e21efc031184\") " pod="openstack/crc-debug-j4rw8" Jun 06 10:14:01 crc kubenswrapper[4911]: I0606 10:14:01.970859 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f8941396-b2c6-4464-97b2-e21efc031184-host\") pod \"crc-debug-j4rw8\" (UID: \"f8941396-b2c6-4464-97b2-e21efc031184\") " pod="openstack/crc-debug-j4rw8" Jun 06 10:14:01 crc kubenswrapper[4911]: I0606 10:14:01.970964 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxsbx\" (UniqueName: \"kubernetes.io/projected/f8941396-b2c6-4464-97b2-e21efc031184-kube-api-access-hxsbx\") pod \"crc-debug-j4rw8\" (UID: \"f8941396-b2c6-4464-97b2-e21efc031184\") " pod="openstack/crc-debug-j4rw8" Jun 06 10:14:01 crc kubenswrapper[4911]: I0606 10:14:01.970994 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f8941396-b2c6-4464-97b2-e21efc031184-host\") pod \"crc-debug-j4rw8\" (UID: 
\"f8941396-b2c6-4464-97b2-e21efc031184\") " pod="openstack/crc-debug-j4rw8" Jun 06 10:14:02 crc kubenswrapper[4911]: I0606 10:14:02.014705 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxsbx\" (UniqueName: \"kubernetes.io/projected/f8941396-b2c6-4464-97b2-e21efc031184-kube-api-access-hxsbx\") pod \"crc-debug-j4rw8\" (UID: \"f8941396-b2c6-4464-97b2-e21efc031184\") " pod="openstack/crc-debug-j4rw8" Jun 06 10:14:02 crc kubenswrapper[4911]: I0606 10:14:02.315195 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-j4rw8" Jun 06 10:14:02 crc kubenswrapper[4911]: I0606 10:14:02.743245 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-j4rw8" event={"ID":"f8941396-b2c6-4464-97b2-e21efc031184","Type":"ContainerStarted","Data":"a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea"} Jun 06 10:14:02 crc kubenswrapper[4911]: I0606 10:14:02.743517 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-j4rw8" event={"ID":"f8941396-b2c6-4464-97b2-e21efc031184","Type":"ContainerStarted","Data":"738dddfa2350582a9228c88265e5c7926ce7ede9509b8adf2d8ab664f87ad831"} Jun 06 10:14:02 crc kubenswrapper[4911]: I0606 10:14:02.767520 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-j4rw8" podStartSLOduration=1.7674968770000001 podStartE2EDuration="1.767496877s" podCreationTimestamp="2025-06-06 10:14:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:14:02.766608334 +0000 UTC m=+3654.042033877" watchObservedRunningTime="2025-06-06 10:14:02.767496877 +0000 UTC m=+3654.042922420" Jun 06 10:14:09 crc kubenswrapper[4911]: I0606 10:14:09.956626 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:14:09 crc kubenswrapper[4911]: E0606 10:14:09.957551 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:14:12 crc kubenswrapper[4911]: I0606 10:14:12.881798 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-j4rw8"] Jun 06 10:14:12 crc kubenswrapper[4911]: I0606 10:14:12.883119 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-j4rw8" podUID="f8941396-b2c6-4464-97b2-e21efc031184" containerName="container-00" containerID="cri-o://a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea" gracePeriod=2 Jun 06 10:14:12 crc kubenswrapper[4911]: I0606 10:14:12.889890 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-j4rw8"] Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.019778 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-j4rw8" Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.119554 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxsbx\" (UniqueName: \"kubernetes.io/projected/f8941396-b2c6-4464-97b2-e21efc031184-kube-api-access-hxsbx\") pod \"f8941396-b2c6-4464-97b2-e21efc031184\" (UID: \"f8941396-b2c6-4464-97b2-e21efc031184\") " Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.119718 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f8941396-b2c6-4464-97b2-e21efc031184-host\") pod \"f8941396-b2c6-4464-97b2-e21efc031184\" (UID: \"f8941396-b2c6-4464-97b2-e21efc031184\") " Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.119758 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f8941396-b2c6-4464-97b2-e21efc031184-host" (OuterVolumeSpecName: "host") pod "f8941396-b2c6-4464-97b2-e21efc031184" (UID: "f8941396-b2c6-4464-97b2-e21efc031184"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.120497 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f8941396-b2c6-4464-97b2-e21efc031184-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.128032 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8941396-b2c6-4464-97b2-e21efc031184-kube-api-access-hxsbx" (OuterVolumeSpecName: "kube-api-access-hxsbx") pod "f8941396-b2c6-4464-97b2-e21efc031184" (UID: "f8941396-b2c6-4464-97b2-e21efc031184"). InnerVolumeSpecName "kube-api-access-hxsbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.221829 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxsbx\" (UniqueName: \"kubernetes.io/projected/f8941396-b2c6-4464-97b2-e21efc031184-kube-api-access-hxsbx\") on node \"crc\" DevicePath \"\"" Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.874018 4911 generic.go:334] "Generic (PLEG): container finished" podID="f8941396-b2c6-4464-97b2-e21efc031184" containerID="a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea" exitCode=0 Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.874211 4911 scope.go:117] "RemoveContainer" containerID="a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea" Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.874511 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-j4rw8" Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.914112 4911 scope.go:117] "RemoveContainer" containerID="a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea" Jun 06 10:14:13 crc kubenswrapper[4911]: E0606 10:14:13.914778 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea\": container with ID starting with a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea not found: ID does not exist" containerID="a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea" Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.914874 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea"} err="failed to get container status \"a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea\": rpc error: code = NotFound desc = could not find container \"a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea\": container with ID starting with a68c5e3fc37b015806a11cc0ee2f559af56fa1c0bb52beaaf7fb5752a6059dea not found: ID does not exist" Jun 06 10:14:13 crc kubenswrapper[4911]: I0606 10:14:13.960178 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8941396-b2c6-4464-97b2-e21efc031184" path="/var/lib/kubelet/pods/f8941396-b2c6-4464-97b2-e21efc031184/volumes" Jun 06 10:14:21 crc kubenswrapper[4911]: I0606 10:14:21.755366 4911 scope.go:117] "RemoveContainer" containerID="9fea252b326f3de8da995b2cb684c5861e04b38043a12aa9cd25a19a3f4c87fc" Jun 06 10:14:21 crc kubenswrapper[4911]: I0606 10:14:21.783015 4911 scope.go:117] "RemoveContainer" containerID="14877ae6e9489e1b1564de1842dd2440a322f75dccb3273846f0f82886d48339" Jun 06 10:14:21 crc kubenswrapper[4911]: I0606 10:14:21.834661 4911 scope.go:117] "RemoveContainer" containerID="fc010892836f6281a0ca7e442dbae18a0dd3341119bbca2eb9e82752d39f3c25" Jun 06 10:14:24 crc kubenswrapper[4911]: I0606 10:14:24.949355 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:14:24 crc kubenswrapper[4911]: E0606 10:14:24.950817 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:14:35 crc kubenswrapper[4911]: I0606 10:14:35.949406 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:14:35 crc kubenswrapper[4911]: E0606 10:14:35.951286 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:14:46 crc kubenswrapper[4911]: I0606 10:14:46.949168 4911 scope.go:117] "RemoveContainer" 
containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:14:46 crc kubenswrapper[4911]: E0606 10:14:46.950889 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.177085 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75"] Jun 06 10:15:00 crc kubenswrapper[4911]: E0606 10:15:00.178175 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8941396-b2c6-4464-97b2-e21efc031184" containerName="container-00" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.178196 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8941396-b2c6-4464-97b2-e21efc031184" containerName="container-00" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.178473 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8941396-b2c6-4464-97b2-e21efc031184" containerName="container-00" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.179311 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.181379 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.181945 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.186657 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75"] Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.355211 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec3b4e61-2f4a-4261-afd2-3f985059ad60-config-volume\") pod \"collect-profiles-29153415-fbt75\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.355665 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec3b4e61-2f4a-4261-afd2-3f985059ad60-secret-volume\") pod \"collect-profiles-29153415-fbt75\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.355870 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkwgs\" (UniqueName: \"kubernetes.io/projected/ec3b4e61-2f4a-4261-afd2-3f985059ad60-kube-api-access-fkwgs\") pod \"collect-profiles-29153415-fbt75\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 
10:15:00.458058 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec3b4e61-2f4a-4261-afd2-3f985059ad60-config-volume\") pod \"collect-profiles-29153415-fbt75\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.458248 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec3b4e61-2f4a-4261-afd2-3f985059ad60-secret-volume\") pod \"collect-profiles-29153415-fbt75\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.458316 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkwgs\" (UniqueName: \"kubernetes.io/projected/ec3b4e61-2f4a-4261-afd2-3f985059ad60-kube-api-access-fkwgs\") pod \"collect-profiles-29153415-fbt75\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.459061 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec3b4e61-2f4a-4261-afd2-3f985059ad60-config-volume\") pod \"collect-profiles-29153415-fbt75\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.464599 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec3b4e61-2f4a-4261-afd2-3f985059ad60-secret-volume\") pod \"collect-profiles-29153415-fbt75\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.474007 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkwgs\" (UniqueName: \"kubernetes.io/projected/ec3b4e61-2f4a-4261-afd2-3f985059ad60-kube-api-access-fkwgs\") pod \"collect-profiles-29153415-fbt75\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.498522 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:00 crc kubenswrapper[4911]: I0606 10:15:00.958468 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:15:00 crc kubenswrapper[4911]: E0606 10:15:00.959063 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:15:01 crc kubenswrapper[4911]: I0606 10:15:01.153564 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75"] Jun 06 10:15:01 crc kubenswrapper[4911]: I0606 10:15:01.298207 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" event={"ID":"ec3b4e61-2f4a-4261-afd2-3f985059ad60","Type":"ContainerStarted","Data":"12c5b3bb1ddca674b5985f9204dd668f8519dfbced9bff7acaf1c83fdcbbdd8a"} Jun 06 10:15:02 crc kubenswrapper[4911]: I0606 10:15:02.312300 4911 generic.go:334] "Generic (PLEG): container finished" podID="ec3b4e61-2f4a-4261-afd2-3f985059ad60" containerID="946c71feb9751b5fbcb3ab17c19f17388fd24479f8ae5e948c4546cd907ac73a" exitCode=0 Jun 06 10:15:02 crc kubenswrapper[4911]: I0606 10:15:02.312354 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" event={"ID":"ec3b4e61-2f4a-4261-afd2-3f985059ad60","Type":"ContainerDied","Data":"946c71feb9751b5fbcb3ab17c19f17388fd24479f8ae5e948c4546cd907ac73a"} Jun 06 10:15:02 crc kubenswrapper[4911]: I0606 10:15:02.320935 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-mwpvk"] Jun 06 10:15:02 crc kubenswrapper[4911]: I0606 10:15:02.322562 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mwpvk" Jun 06 10:15:02 crc kubenswrapper[4911]: I0606 10:15:02.398794 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-host\") pod \"crc-debug-mwpvk\" (UID: \"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317\") " pod="openstack/crc-debug-mwpvk" Jun 06 10:15:02 crc kubenswrapper[4911]: I0606 10:15:02.399163 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kdw6\" (UniqueName: \"kubernetes.io/projected/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-kube-api-access-9kdw6\") pod \"crc-debug-mwpvk\" (UID: \"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317\") " pod="openstack/crc-debug-mwpvk" Jun 06 10:15:02 crc kubenswrapper[4911]: I0606 10:15:02.500743 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-host\") pod \"crc-debug-mwpvk\" (UID: \"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317\") " pod="openstack/crc-debug-mwpvk" Jun 06 10:15:02 crc kubenswrapper[4911]: I0606 10:15:02.500870 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kdw6\" (UniqueName: \"kubernetes.io/projected/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-kube-api-access-9kdw6\") pod \"crc-debug-mwpvk\" (UID: \"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317\") " pod="openstack/crc-debug-mwpvk" Jun 06 10:15:02 crc kubenswrapper[4911]: I0606 10:15:02.501149 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-host\") pod \"crc-debug-mwpvk\" (UID: \"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317\") " pod="openstack/crc-debug-mwpvk" Jun 06 10:15:02 crc kubenswrapper[4911]: I0606 10:15:02.519684 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kdw6\" (UniqueName: \"kubernetes.io/projected/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-kube-api-access-9kdw6\") pod \"crc-debug-mwpvk\" (UID: \"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317\") " pod="openstack/crc-debug-mwpvk" Jun 06 10:15:02 crc kubenswrapper[4911]: I0606 10:15:02.647264 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mwpvk" Jun 06 10:15:02 crc kubenswrapper[4911]: W0606 10:15:02.678746 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5fbb5ab7_1d2d_48a7_99c1_1ffeec623317.slice/crio-c55b2f25ad8a0ef25d99feca5485a67ffe885a410f98473ddbee173473d2035c WatchSource:0}: Error finding container c55b2f25ad8a0ef25d99feca5485a67ffe885a410f98473ddbee173473d2035c: Status 404 returned error can't find the container with id c55b2f25ad8a0ef25d99feca5485a67ffe885a410f98473ddbee173473d2035c Jun 06 10:15:03 crc kubenswrapper[4911]: I0606 10:15:03.321963 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mwpvk" event={"ID":"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317","Type":"ContainerStarted","Data":"3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692"} Jun 06 10:15:03 crc kubenswrapper[4911]: I0606 10:15:03.322549 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mwpvk" event={"ID":"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317","Type":"ContainerStarted","Data":"c55b2f25ad8a0ef25d99feca5485a67ffe885a410f98473ddbee173473d2035c"} Jun 06 10:15:03 crc kubenswrapper[4911]: I0606 10:15:03.340024 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-mwpvk" podStartSLOduration=1.340005092 podStartE2EDuration="1.340005092s" podCreationTimestamp="2025-06-06 10:15:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:15:03.334603822 +0000 UTC m=+3714.610029365" watchObservedRunningTime="2025-06-06 10:15:03.340005092 +0000 UTC m=+3714.615430635" Jun 06 10:15:04 crc kubenswrapper[4911]: I0606 10:15:04.608549 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:04 crc kubenswrapper[4911]: I0606 10:15:04.751201 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec3b4e61-2f4a-4261-afd2-3f985059ad60-config-volume\") pod \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " Jun 06 10:15:04 crc kubenswrapper[4911]: I0606 10:15:04.751273 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkwgs\" (UniqueName: \"kubernetes.io/projected/ec3b4e61-2f4a-4261-afd2-3f985059ad60-kube-api-access-fkwgs\") pod \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " Jun 06 10:15:04 crc kubenswrapper[4911]: I0606 10:15:04.751334 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec3b4e61-2f4a-4261-afd2-3f985059ad60-secret-volume\") pod \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\" (UID: \"ec3b4e61-2f4a-4261-afd2-3f985059ad60\") " Jun 06 10:15:04 crc kubenswrapper[4911]: I0606 10:15:04.752009 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec3b4e61-2f4a-4261-afd2-3f985059ad60-config-volume" (OuterVolumeSpecName: "config-volume") pod "ec3b4e61-2f4a-4261-afd2-3f985059ad60" (UID: "ec3b4e61-2f4a-4261-afd2-3f985059ad60"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 10:15:04 crc kubenswrapper[4911]: I0606 10:15:04.759397 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec3b4e61-2f4a-4261-afd2-3f985059ad60-kube-api-access-fkwgs" (OuterVolumeSpecName: "kube-api-access-fkwgs") pod "ec3b4e61-2f4a-4261-afd2-3f985059ad60" (UID: "ec3b4e61-2f4a-4261-afd2-3f985059ad60"). InnerVolumeSpecName "kube-api-access-fkwgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:15:04 crc kubenswrapper[4911]: I0606 10:15:04.760620 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec3b4e61-2f4a-4261-afd2-3f985059ad60-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ec3b4e61-2f4a-4261-afd2-3f985059ad60" (UID: "ec3b4e61-2f4a-4261-afd2-3f985059ad60"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 10:15:04 crc kubenswrapper[4911]: I0606 10:15:04.854292 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec3b4e61-2f4a-4261-afd2-3f985059ad60-config-volume\") on node \"crc\" DevicePath \"\"" Jun 06 10:15:04 crc kubenswrapper[4911]: I0606 10:15:04.854335 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkwgs\" (UniqueName: \"kubernetes.io/projected/ec3b4e61-2f4a-4261-afd2-3f985059ad60-kube-api-access-fkwgs\") on node \"crc\" DevicePath \"\"" Jun 06 10:15:04 crc kubenswrapper[4911]: I0606 10:15:04.854351 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec3b4e61-2f4a-4261-afd2-3f985059ad60-secret-volume\") on node \"crc\" DevicePath \"\"" Jun 06 10:15:05 crc kubenswrapper[4911]: I0606 10:15:05.343783 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" event={"ID":"ec3b4e61-2f4a-4261-afd2-3f985059ad60","Type":"ContainerDied","Data":"12c5b3bb1ddca674b5985f9204dd668f8519dfbced9bff7acaf1c83fdcbbdd8a"} Jun 06 10:15:05 crc kubenswrapper[4911]: I0606 10:15:05.343835 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12c5b3bb1ddca674b5985f9204dd668f8519dfbced9bff7acaf1c83fdcbbdd8a" Jun 06 10:15:05 crc kubenswrapper[4911]: I0606 10:15:05.343848 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75" Jun 06 10:15:05 crc kubenswrapper[4911]: I0606 10:15:05.700733 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn"] Jun 06 10:15:05 crc kubenswrapper[4911]: I0606 10:15:05.710517 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153370-68mhn"] Jun 06 10:15:05 crc kubenswrapper[4911]: I0606 10:15:05.959341 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d2767cd-89b4-421b-86f0-c588fbd2dddf" path="/var/lib/kubelet/pods/5d2767cd-89b4-421b-86f0-c588fbd2dddf/volumes" Jun 06 10:15:11 crc kubenswrapper[4911]: I0606 10:15:11.948056 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:15:11 crc kubenswrapper[4911]: E0606 10:15:11.948871 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.231965 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-mwpvk"] Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.232456 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-mwpvk" podUID="5fbb5ab7-1d2d-48a7-99c1-1ffeec623317" containerName="container-00" containerID="cri-o://3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692" gracePeriod=2 Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.241049 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-mwpvk"] Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.338855 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-mwpvk" Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.430112 4911 generic.go:334] "Generic (PLEG): container finished" podID="5fbb5ab7-1d2d-48a7-99c1-1ffeec623317" containerID="3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692" exitCode=0 Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.430172 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-mwpvk" Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.430206 4911 scope.go:117] "RemoveContainer" containerID="3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692" Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.430650 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-host\") pod \"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317\" (UID: \"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317\") " Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.430763 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-host" (OuterVolumeSpecName: "host") pod "5fbb5ab7-1d2d-48a7-99c1-1ffeec623317" (UID: "5fbb5ab7-1d2d-48a7-99c1-1ffeec623317"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.430787 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kdw6\" (UniqueName: \"kubernetes.io/projected/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-kube-api-access-9kdw6\") pod \"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317\" (UID: \"5fbb5ab7-1d2d-48a7-99c1-1ffeec623317\") " Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.431667 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.436792 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-kube-api-access-9kdw6" (OuterVolumeSpecName: "kube-api-access-9kdw6") pod "5fbb5ab7-1d2d-48a7-99c1-1ffeec623317" (UID: "5fbb5ab7-1d2d-48a7-99c1-1ffeec623317"). InnerVolumeSpecName "kube-api-access-9kdw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.491521 4911 scope.go:117] "RemoveContainer" containerID="3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692" Jun 06 10:15:13 crc kubenswrapper[4911]: E0606 10:15:13.491927 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692\": container with ID starting with 3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692 not found: ID does not exist" containerID="3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692" Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.491955 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692"} err="failed to get container status \"3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692\": rpc error: code = NotFound desc = could not find container \"3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692\": container with ID starting with 3a94bb8f760ed89e0ccf3d87d94d1e3213fc7db70d3b474fea4da2f391c77692 not found: ID does not exist" Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.534858 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kdw6\" (UniqueName: \"kubernetes.io/projected/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317-kube-api-access-9kdw6\") on node \"crc\" DevicePath \"\"" Jun 06 10:15:13 crc kubenswrapper[4911]: I0606 10:15:13.957487 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fbb5ab7-1d2d-48a7-99c1-1ffeec623317" path="/var/lib/kubelet/pods/5fbb5ab7-1d2d-48a7-99c1-1ffeec623317/volumes" Jun 06 10:15:21 crc kubenswrapper[4911]: I0606 10:15:21.933200 4911 scope.go:117] "RemoveContainer" containerID="e24afeff7b93f095d0b78ee9f6d835dad4a181acd727cc0fc9e0e56ddea9b8ee" Jun 06 10:15:25 crc kubenswrapper[4911]: I0606 10:15:25.948414 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:15:25 crc kubenswrapper[4911]: E0606 10:15:25.949164 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:15:38 crc kubenswrapper[4911]: I0606 10:15:38.948477 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:15:38 crc kubenswrapper[4911]: E0606 10:15:38.949989 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:15:50 crc kubenswrapper[4911]: I0606 10:15:50.949109 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:15:50 crc kubenswrapper[4911]: E0606 10:15:50.950268 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.649913 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-p2mdv"] Jun 06 10:16:01 crc kubenswrapper[4911]: E0606 10:16:01.650839 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fbb5ab7-1d2d-48a7-99c1-1ffeec623317" containerName="container-00" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.650854 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fbb5ab7-1d2d-48a7-99c1-1ffeec623317" containerName="container-00" Jun 06 10:16:01 crc kubenswrapper[4911]: E0606 10:16:01.650872 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec3b4e61-2f4a-4261-afd2-3f985059ad60" containerName="collect-profiles" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.650878 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec3b4e61-2f4a-4261-afd2-3f985059ad60" containerName="collect-profiles" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.651196 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec3b4e61-2f4a-4261-afd2-3f985059ad60" containerName="collect-profiles" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.651219 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fbb5ab7-1d2d-48a7-99c1-1ffeec623317" containerName="container-00" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.651877 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-p2mdv" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.762444 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ccb84234-07a1-4fbb-9342-574b741aaa86-host\") pod \"crc-debug-p2mdv\" (UID: \"ccb84234-07a1-4fbb-9342-574b741aaa86\") " pod="openstack/crc-debug-p2mdv" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.762512 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brqht\" (UniqueName: \"kubernetes.io/projected/ccb84234-07a1-4fbb-9342-574b741aaa86-kube-api-access-brqht\") pod \"crc-debug-p2mdv\" (UID: \"ccb84234-07a1-4fbb-9342-574b741aaa86\") " pod="openstack/crc-debug-p2mdv" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.865129 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ccb84234-07a1-4fbb-9342-574b741aaa86-host\") pod \"crc-debug-p2mdv\" (UID: \"ccb84234-07a1-4fbb-9342-574b741aaa86\") " pod="openstack/crc-debug-p2mdv" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.865254 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brqht\" (UniqueName: \"kubernetes.io/projected/ccb84234-07a1-4fbb-9342-574b741aaa86-kube-api-access-brqht\") pod \"crc-debug-p2mdv\" (UID: \"ccb84234-07a1-4fbb-9342-574b741aaa86\") " pod="openstack/crc-debug-p2mdv" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.865302 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ccb84234-07a1-4fbb-9342-574b741aaa86-host\") pod \"crc-debug-p2mdv\" (UID: \"ccb84234-07a1-4fbb-9342-574b741aaa86\") " pod="openstack/crc-debug-p2mdv" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.886376 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brqht\" (UniqueName: \"kubernetes.io/projected/ccb84234-07a1-4fbb-9342-574b741aaa86-kube-api-access-brqht\") pod \"crc-debug-p2mdv\" (UID: \"ccb84234-07a1-4fbb-9342-574b741aaa86\") " pod="openstack/crc-debug-p2mdv" Jun 06 10:16:01 crc kubenswrapper[4911]: I0606 10:16:01.970497 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-p2mdv" Jun 06 10:16:02 crc kubenswrapper[4911]: I0606 10:16:02.882075 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-p2mdv" event={"ID":"ccb84234-07a1-4fbb-9342-574b741aaa86","Type":"ContainerStarted","Data":"7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3"} Jun 06 10:16:02 crc kubenswrapper[4911]: I0606 10:16:02.882389 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-p2mdv" event={"ID":"ccb84234-07a1-4fbb-9342-574b741aaa86","Type":"ContainerStarted","Data":"3d9fe354d90d2b4ae36dd28f996b055bc15ae05a63def893e4d9477540365514"} Jun 06 10:16:02 crc kubenswrapper[4911]: I0606 10:16:02.902007 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-p2mdv" podStartSLOduration=1.9019842900000001 podStartE2EDuration="1.90198429s" podCreationTimestamp="2025-06-06 10:16:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:16:02.895577875 +0000 UTC m=+3774.171003428" watchObservedRunningTime="2025-06-06 10:16:02.90198429 +0000 UTC m=+3774.177409843" Jun 06 10:16:03 crc kubenswrapper[4911]: I0606 10:16:03.948426 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:16:03 crc kubenswrapper[4911]: E0606 10:16:03.949972 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.575032 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-p2mdv"] Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.576002 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-p2mdv" podUID="ccb84234-07a1-4fbb-9342-574b741aaa86" containerName="container-00" containerID="cri-o://7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3" gracePeriod=2 Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.585463 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-p2mdv"] Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.686992 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-p2mdv" Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.722342 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brqht\" (UniqueName: \"kubernetes.io/projected/ccb84234-07a1-4fbb-9342-574b741aaa86-kube-api-access-brqht\") pod \"ccb84234-07a1-4fbb-9342-574b741aaa86\" (UID: \"ccb84234-07a1-4fbb-9342-574b741aaa86\") " Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.722616 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ccb84234-07a1-4fbb-9342-574b741aaa86-host\") pod \"ccb84234-07a1-4fbb-9342-574b741aaa86\" (UID: \"ccb84234-07a1-4fbb-9342-574b741aaa86\") " Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.722795 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ccb84234-07a1-4fbb-9342-574b741aaa86-host" (OuterVolumeSpecName: "host") pod "ccb84234-07a1-4fbb-9342-574b741aaa86" (UID: "ccb84234-07a1-4fbb-9342-574b741aaa86"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.723384 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ccb84234-07a1-4fbb-9342-574b741aaa86-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.729955 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccb84234-07a1-4fbb-9342-574b741aaa86-kube-api-access-brqht" (OuterVolumeSpecName: "kube-api-access-brqht") pod "ccb84234-07a1-4fbb-9342-574b741aaa86" (UID: "ccb84234-07a1-4fbb-9342-574b741aaa86"). InnerVolumeSpecName "kube-api-access-brqht". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.825127 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brqht\" (UniqueName: \"kubernetes.io/projected/ccb84234-07a1-4fbb-9342-574b741aaa86-kube-api-access-brqht\") on node \"crc\" DevicePath \"\"" Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.973527 4911 generic.go:334] "Generic (PLEG): container finished" podID="ccb84234-07a1-4fbb-9342-574b741aaa86" containerID="7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3" exitCode=0 Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.973613 4911 scope.go:117] "RemoveContainer" containerID="7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3" Jun 06 10:16:12 crc kubenswrapper[4911]: I0606 10:16:12.973669 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-p2mdv" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.012256 4911 scope.go:117] "RemoveContainer" containerID="7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3" Jun 06 10:16:13 crc kubenswrapper[4911]: E0606 10:16:13.012989 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3\": container with ID starting with 7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3 not found: ID does not exist" containerID="7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.013026 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3"} err="failed to get container status \"7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3\": rpc error: code = NotFound desc = could not find container \"7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3\": container with ID starting with 7d64611ce8055205299f3c8e37ef1a99bf7f20f745c1e4b166cafd1c9d3f1ce3 not found: ID does not exist" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.274668 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-grr5v"] Jun 06 10:16:13 crc kubenswrapper[4911]: E0606 10:16:13.275454 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccb84234-07a1-4fbb-9342-574b741aaa86" containerName="container-00" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.275486 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccb84234-07a1-4fbb-9342-574b741aaa86" containerName="container-00" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.275743 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccb84234-07a1-4fbb-9342-574b741aaa86" containerName="container-00" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.277554 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.287487 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grr5v"] Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.336299 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-utilities\") pod \"community-operators-grr5v\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.336448 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-catalog-content\") pod \"community-operators-grr5v\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.336631 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxbfh\" (UniqueName: \"kubernetes.io/projected/8040a62a-fa39-41c8-a7fc-b28059b6e367-kube-api-access-kxbfh\") pod \"community-operators-grr5v\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.440228 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-catalog-content\") pod \"community-operators-grr5v\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.440435 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxbfh\" (UniqueName: \"kubernetes.io/projected/8040a62a-fa39-41c8-a7fc-b28059b6e367-kube-api-access-kxbfh\") pod \"community-operators-grr5v\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.440508 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-utilities\") pod \"community-operators-grr5v\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.440666 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-catalog-content\") pod \"community-operators-grr5v\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.441270 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-utilities\") pod \"community-operators-grr5v\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.482583 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kxbfh\" (UniqueName: \"kubernetes.io/projected/8040a62a-fa39-41c8-a7fc-b28059b6e367-kube-api-access-kxbfh\") pod \"community-operators-grr5v\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.602987 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:13 crc kubenswrapper[4911]: I0606 10:16:13.958056 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccb84234-07a1-4fbb-9342-574b741aaa86" path="/var/lib/kubelet/pods/ccb84234-07a1-4fbb-9342-574b741aaa86/volumes" Jun 06 10:16:14 crc kubenswrapper[4911]: I0606 10:16:14.340874 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grr5v"] Jun 06 10:16:14 crc kubenswrapper[4911]: I0606 10:16:14.996960 4911 generic.go:334] "Generic (PLEG): container finished" podID="8040a62a-fa39-41c8-a7fc-b28059b6e367" containerID="b37f8ddea9985b16fb9d785f7307d79ea55fa97b714de6b068e407d7e885cbcf" exitCode=0 Jun 06 10:16:14 crc kubenswrapper[4911]: I0606 10:16:14.997069 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grr5v" event={"ID":"8040a62a-fa39-41c8-a7fc-b28059b6e367","Type":"ContainerDied","Data":"b37f8ddea9985b16fb9d785f7307d79ea55fa97b714de6b068e407d7e885cbcf"} Jun 06 10:16:14 crc kubenswrapper[4911]: I0606 10:16:14.997293 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grr5v" event={"ID":"8040a62a-fa39-41c8-a7fc-b28059b6e367","Type":"ContainerStarted","Data":"2597b12f1256e79c1f5411e1c3f678f5d4cd4594d6ba5f89733d15758db4e948"} Jun 06 10:16:14 crc kubenswrapper[4911]: I0606 10:16:14.999010 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 10:16:16 crc kubenswrapper[4911]: I0606 10:16:16.948883 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:16:16 crc kubenswrapper[4911]: E0606 10:16:16.950470 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:16:20 crc kubenswrapper[4911]: I0606 10:16:20.046399 4911 generic.go:334] "Generic (PLEG): container finished" podID="8040a62a-fa39-41c8-a7fc-b28059b6e367" containerID="7855574e7907283d59c95505c66a68ee19754fedfd47c58a8c37a2b69864a41f" exitCode=0 Jun 06 10:16:20 crc kubenswrapper[4911]: I0606 10:16:20.046481 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grr5v" event={"ID":"8040a62a-fa39-41c8-a7fc-b28059b6e367","Type":"ContainerDied","Data":"7855574e7907283d59c95505c66a68ee19754fedfd47c58a8c37a2b69864a41f"} Jun 06 10:16:22 crc kubenswrapper[4911]: I0606 10:16:22.001882 4911 scope.go:117] "RemoveContainer" containerID="1e55d751b1ea39d14f2e00aaf058c93d9ab99907c97346cca86e45c0a10a0c5d" Jun 06 10:16:22 crc kubenswrapper[4911]: I0606 10:16:22.071117 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-grr5v" event={"ID":"8040a62a-fa39-41c8-a7fc-b28059b6e367","Type":"ContainerStarted","Data":"cd5ce51de56833cd29c72fe92f3d08438b2271bf5fcf7b58ae696cfa9a6a1b47"} Jun 06 10:16:22 crc kubenswrapper[4911]: I0606 10:16:22.096194 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-grr5v" podStartSLOduration=2.946072644 podStartE2EDuration="9.096164998s" podCreationTimestamp="2025-06-06 10:16:13 +0000 UTC" firstStartedPulling="2025-06-06 10:16:14.998759005 +0000 UTC m=+3786.274184548" lastFinishedPulling="2025-06-06 10:16:21.148851369 +0000 UTC m=+3792.424276902" observedRunningTime="2025-06-06 10:16:22.089190087 +0000 UTC m=+3793.364615630" watchObservedRunningTime="2025-06-06 10:16:22.096164998 +0000 UTC m=+3793.371590541" Jun 06 10:16:23 crc kubenswrapper[4911]: I0606 10:16:23.603885 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:23 crc kubenswrapper[4911]: I0606 10:16:23.604229 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:23 crc kubenswrapper[4911]: I0606 10:16:23.651717 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:28 crc kubenswrapper[4911]: I0606 10:16:28.948408 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:16:28 crc kubenswrapper[4911]: E0606 10:16:28.949134 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:16:33 crc kubenswrapper[4911]: I0606 10:16:33.658132 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-grr5v" Jun 06 10:16:33 crc kubenswrapper[4911]: I0606 10:16:33.733991 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grr5v"] Jun 06 10:16:33 crc kubenswrapper[4911]: I0606 10:16:33.785517 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-clqk2"] Jun 06 10:16:33 crc kubenswrapper[4911]: I0606 10:16:33.786237 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-clqk2" podUID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" containerName="registry-server" containerID="cri-o://52149ebc1aba09d2aaf6aa6462a867b040641303026c67f58db2d53cc5b7d51c" gracePeriod=2 Jun 06 10:16:34 crc kubenswrapper[4911]: I0606 10:16:34.196493 4911 generic.go:334] "Generic (PLEG): container finished" podID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" containerID="52149ebc1aba09d2aaf6aa6462a867b040641303026c67f58db2d53cc5b7d51c" exitCode=0 Jun 06 10:16:34 crc kubenswrapper[4911]: I0606 10:16:34.197396 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-clqk2" 
event={"ID":"6bf69688-d2e8-4fa7-aae0-4c630fc6234b","Type":"ContainerDied","Data":"52149ebc1aba09d2aaf6aa6462a867b040641303026c67f58db2d53cc5b7d51c"} Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.011823 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-clqk2" Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.076777 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-catalog-content\") pod \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\" (UID: \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.077185 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmv5d\" (UniqueName: \"kubernetes.io/projected/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-kube-api-access-nmv5d\") pod \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\" (UID: \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.077428 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-utilities\") pod \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\" (UID: \"6bf69688-d2e8-4fa7-aae0-4c630fc6234b\") " Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.078204 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-utilities" (OuterVolumeSpecName: "utilities") pod "6bf69688-d2e8-4fa7-aae0-4c630fc6234b" (UID: "6bf69688-d2e8-4fa7-aae0-4c630fc6234b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.088550 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-kube-api-access-nmv5d" (OuterVolumeSpecName: "kube-api-access-nmv5d") pod "6bf69688-d2e8-4fa7-aae0-4c630fc6234b" (UID: "6bf69688-d2e8-4fa7-aae0-4c630fc6234b"). InnerVolumeSpecName "kube-api-access-nmv5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.115031 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6bf69688-d2e8-4fa7-aae0-4c630fc6234b" (UID: "6bf69688-d2e8-4fa7-aae0-4c630fc6234b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.180202 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.180242 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmv5d\" (UniqueName: \"kubernetes.io/projected/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-kube-api-access-nmv5d\") on node \"crc\" DevicePath \"\"" Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.180254 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bf69688-d2e8-4fa7-aae0-4c630fc6234b-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.210222 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-clqk2" event={"ID":"6bf69688-d2e8-4fa7-aae0-4c630fc6234b","Type":"ContainerDied","Data":"b8ff18b6b27f2116af46a4a68d86886c616e9d4494e563011f1fcac75456b81b"} Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.210276 4911 scope.go:117] "RemoveContainer" containerID="52149ebc1aba09d2aaf6aa6462a867b040641303026c67f58db2d53cc5b7d51c" Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.210313 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-clqk2" Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.234319 4911 scope.go:117] "RemoveContainer" containerID="571fe5570aa8abfb791f5f2aaa695f77b8713a8be863db4fa0c834c9515f6b8c" Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.247527 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-clqk2"] Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.255458 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-clqk2"] Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.294019 4911 scope.go:117] "RemoveContainer" containerID="f1405b47528c43db845f42761c2120e0914ec91ce52d99290e4794f09be95221" Jun 06 10:16:35 crc kubenswrapper[4911]: I0606 10:16:35.958844 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" path="/var/lib/kubelet/pods/6bf69688-d2e8-4fa7-aae0-4c630fc6234b/volumes" Jun 06 10:16:39 crc kubenswrapper[4911]: I0606 10:16:39.956007 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:16:39 crc kubenswrapper[4911]: E0606 10:16:39.957259 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:16:54 crc kubenswrapper[4911]: I0606 10:16:54.948279 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:16:54 crc kubenswrapper[4911]: E0606 10:16:54.949152 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:17:01 crc kubenswrapper[4911]: I0606 10:17:01.978636 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-j49pb"] Jun 06 10:17:01 crc kubenswrapper[4911]: E0606 10:17:01.979560 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" containerName="registry-server" Jun 06 10:17:01 crc kubenswrapper[4911]: I0606 10:17:01.979575 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" containerName="registry-server" Jun 06 10:17:01 crc kubenswrapper[4911]: E0606 10:17:01.979603 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" containerName="extract-content" Jun 06 10:17:01 crc kubenswrapper[4911]: I0606 10:17:01.979610 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" containerName="extract-content" Jun 06 10:17:01 crc kubenswrapper[4911]: E0606 10:17:01.979625 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" containerName="extract-utilities" Jun 06 10:17:01 crc kubenswrapper[4911]: I0606 10:17:01.979630 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" containerName="extract-utilities" Jun 06 10:17:01 crc kubenswrapper[4911]: I0606 10:17:01.979823 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bf69688-d2e8-4fa7-aae0-4c630fc6234b" containerName="registry-server" Jun 06 10:17:01 crc kubenswrapper[4911]: I0606 10:17:01.980828 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-j49pb" Jun 06 10:17:02 crc kubenswrapper[4911]: I0606 10:17:02.148355 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5sbn\" (UniqueName: \"kubernetes.io/projected/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-kube-api-access-r5sbn\") pod \"crc-debug-j49pb\" (UID: \"9e2da120-f98d-4d17-98f1-d09d5cc9c66c\") " pod="openstack/crc-debug-j49pb" Jun 06 10:17:02 crc kubenswrapper[4911]: I0606 10:17:02.148456 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-host\") pod \"crc-debug-j49pb\" (UID: \"9e2da120-f98d-4d17-98f1-d09d5cc9c66c\") " pod="openstack/crc-debug-j49pb" Jun 06 10:17:02 crc kubenswrapper[4911]: I0606 10:17:02.250064 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5sbn\" (UniqueName: \"kubernetes.io/projected/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-kube-api-access-r5sbn\") pod \"crc-debug-j49pb\" (UID: \"9e2da120-f98d-4d17-98f1-d09d5cc9c66c\") " pod="openstack/crc-debug-j49pb" Jun 06 10:17:02 crc kubenswrapper[4911]: I0606 10:17:02.250251 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-host\") pod \"crc-debug-j49pb\" (UID: \"9e2da120-f98d-4d17-98f1-d09d5cc9c66c\") " pod="openstack/crc-debug-j49pb" Jun 06 10:17:02 crc kubenswrapper[4911]: I0606 10:17:02.250420 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-host\") pod \"crc-debug-j49pb\" (UID: \"9e2da120-f98d-4d17-98f1-d09d5cc9c66c\") " pod="openstack/crc-debug-j49pb" Jun 06 10:17:02 crc kubenswrapper[4911]: I0606 10:17:02.275774 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5sbn\" (UniqueName: \"kubernetes.io/projected/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-kube-api-access-r5sbn\") pod \"crc-debug-j49pb\" (UID: \"9e2da120-f98d-4d17-98f1-d09d5cc9c66c\") " pod="openstack/crc-debug-j49pb" Jun 06 10:17:02 crc kubenswrapper[4911]: I0606 10:17:02.300852 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-j49pb" Jun 06 10:17:02 crc kubenswrapper[4911]: I0606 10:17:02.466390 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-j49pb" event={"ID":"9e2da120-f98d-4d17-98f1-d09d5cc9c66c","Type":"ContainerStarted","Data":"13ca744df624efa049eb80028bef9cf5102a94c9dd0b5e2212d20446e9cd12cb"} Jun 06 10:17:03 crc kubenswrapper[4911]: I0606 10:17:03.476710 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-j49pb" event={"ID":"9e2da120-f98d-4d17-98f1-d09d5cc9c66c","Type":"ContainerStarted","Data":"a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346"} Jun 06 10:17:03 crc kubenswrapper[4911]: I0606 10:17:03.491048 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-j49pb" podStartSLOduration=2.491022212 podStartE2EDuration="2.491022212s" podCreationTimestamp="2025-06-06 10:17:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:17:03.489315498 +0000 UTC m=+3834.764741041" watchObservedRunningTime="2025-06-06 10:17:03.491022212 +0000 UTC m=+3834.766447755" Jun 06 10:17:05 crc kubenswrapper[4911]: I0606 10:17:05.948475 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:17:05 crc kubenswrapper[4911]: E0606 10:17:05.949015 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:17:12 crc kubenswrapper[4911]: I0606 10:17:12.885504 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-j49pb"] Jun 06 10:17:12 crc kubenswrapper[4911]: I0606 10:17:12.887050 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-j49pb" podUID="9e2da120-f98d-4d17-98f1-d09d5cc9c66c" containerName="container-00" containerID="cri-o://a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346" gracePeriod=2 Jun 06 10:17:12 crc kubenswrapper[4911]: I0606 10:17:12.901842 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-j49pb"] Jun 06 10:17:12 crc kubenswrapper[4911]: I0606 10:17:12.971513 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-j49pb" Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.065837 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5sbn\" (UniqueName: \"kubernetes.io/projected/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-kube-api-access-r5sbn\") pod \"9e2da120-f98d-4d17-98f1-d09d5cc9c66c\" (UID: \"9e2da120-f98d-4d17-98f1-d09d5cc9c66c\") " Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.065966 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-host\") pod \"9e2da120-f98d-4d17-98f1-d09d5cc9c66c\" (UID: \"9e2da120-f98d-4d17-98f1-d09d5cc9c66c\") " Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.067969 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-host" (OuterVolumeSpecName: "host") pod "9e2da120-f98d-4d17-98f1-d09d5cc9c66c" (UID: "9e2da120-f98d-4d17-98f1-d09d5cc9c66c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.073396 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-kube-api-access-r5sbn" (OuterVolumeSpecName: "kube-api-access-r5sbn") pod "9e2da120-f98d-4d17-98f1-d09d5cc9c66c" (UID: "9e2da120-f98d-4d17-98f1-d09d5cc9c66c"). InnerVolumeSpecName "kube-api-access-r5sbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.168106 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5sbn\" (UniqueName: \"kubernetes.io/projected/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-kube-api-access-r5sbn\") on node \"crc\" DevicePath \"\"" Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.168140 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9e2da120-f98d-4d17-98f1-d09d5cc9c66c-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.565991 4911 generic.go:334] "Generic (PLEG): container finished" podID="9e2da120-f98d-4d17-98f1-d09d5cc9c66c" containerID="a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346" exitCode=0 Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.566102 4911 scope.go:117] "RemoveContainer" containerID="a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346" Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.566132 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-j49pb" Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.597552 4911 scope.go:117] "RemoveContainer" containerID="a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346" Jun 06 10:17:13 crc kubenswrapper[4911]: E0606 10:17:13.598172 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346\": container with ID starting with a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346 not found: ID does not exist" containerID="a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346" Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.598226 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346"} err="failed to get container status \"a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346\": rpc error: code = NotFound desc = could not find container \"a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346\": container with ID starting with a12009855908134d2116f383dabda762909aea5576dd558e8e11ce472047d346 not found: ID does not exist" Jun 06 10:17:13 crc kubenswrapper[4911]: I0606 10:17:13.961391 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e2da120-f98d-4d17-98f1-d09d5cc9c66c" path="/var/lib/kubelet/pods/9e2da120-f98d-4d17-98f1-d09d5cc9c66c/volumes" Jun 06 10:17:19 crc kubenswrapper[4911]: I0606 10:17:19.954283 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:17:19 crc kubenswrapper[4911]: E0606 10:17:19.955180 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:17:31 crc kubenswrapper[4911]: I0606 10:17:31.948475 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:17:31 crc kubenswrapper[4911]: E0606 10:17:31.949337 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.258166 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ct4ph"] Jun 06 10:17:40 crc kubenswrapper[4911]: E0606 10:17:40.259361 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e2da120-f98d-4d17-98f1-d09d5cc9c66c" containerName="container-00" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.259377 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e2da120-f98d-4d17-98f1-d09d5cc9c66c" containerName="container-00" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.259642 4911 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="9e2da120-f98d-4d17-98f1-d09d5cc9c66c" containerName="container-00" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.261809 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.276315 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ct4ph"] Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.362908 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-catalog-content\") pod \"redhat-operators-ct4ph\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") " pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.363143 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-utilities\") pod \"redhat-operators-ct4ph\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") " pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.363181 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtll2\" (UniqueName: \"kubernetes.io/projected/54d5f5e1-a17b-4298-8306-30b66f381462-kube-api-access-dtll2\") pod \"redhat-operators-ct4ph\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") " pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.465288 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-utilities\") pod \"redhat-operators-ct4ph\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") " pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.465344 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtll2\" (UniqueName: \"kubernetes.io/projected/54d5f5e1-a17b-4298-8306-30b66f381462-kube-api-access-dtll2\") pod \"redhat-operators-ct4ph\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") " pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.465443 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-catalog-content\") pod \"redhat-operators-ct4ph\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") " pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.465890 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-utilities\") pod \"redhat-operators-ct4ph\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") " pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.465975 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-catalog-content\") pod \"redhat-operators-ct4ph\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") 
" pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.486797 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtll2\" (UniqueName: \"kubernetes.io/projected/54d5f5e1-a17b-4298-8306-30b66f381462-kube-api-access-dtll2\") pod \"redhat-operators-ct4ph\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") " pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:40 crc kubenswrapper[4911]: I0606 10:17:40.591959 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:41 crc kubenswrapper[4911]: I0606 10:17:41.265463 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ct4ph"] Jun 06 10:17:41 crc kubenswrapper[4911]: I0606 10:17:41.866801 4911 generic.go:334] "Generic (PLEG): container finished" podID="54d5f5e1-a17b-4298-8306-30b66f381462" containerID="fc3e91cd1a217359a3a33cebacc190fb8cb376d5453708dcca3fe8058490af1c" exitCode=0 Jun 06 10:17:41 crc kubenswrapper[4911]: I0606 10:17:41.866971 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ct4ph" event={"ID":"54d5f5e1-a17b-4298-8306-30b66f381462","Type":"ContainerDied","Data":"fc3e91cd1a217359a3a33cebacc190fb8cb376d5453708dcca3fe8058490af1c"} Jun 06 10:17:41 crc kubenswrapper[4911]: I0606 10:17:41.867129 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ct4ph" event={"ID":"54d5f5e1-a17b-4298-8306-30b66f381462","Type":"ContainerStarted","Data":"0f19c248fe0cdc8c3e8947c9a9cbfd024200873da2fe54b07e9a079cc8f0ec6c"} Jun 06 10:17:42 crc kubenswrapper[4911]: I0606 10:17:42.876591 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ct4ph" event={"ID":"54d5f5e1-a17b-4298-8306-30b66f381462","Type":"ContainerStarted","Data":"b1139b7f9ab2aeb4315b8d9e517b1451893df5e7ee15e8c9e908957bb312864f"} Jun 06 10:17:43 crc kubenswrapper[4911]: I0606 10:17:43.890315 4911 generic.go:334] "Generic (PLEG): container finished" podID="54d5f5e1-a17b-4298-8306-30b66f381462" containerID="b1139b7f9ab2aeb4315b8d9e517b1451893df5e7ee15e8c9e908957bb312864f" exitCode=0 Jun 06 10:17:43 crc kubenswrapper[4911]: I0606 10:17:43.890372 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ct4ph" event={"ID":"54d5f5e1-a17b-4298-8306-30b66f381462","Type":"ContainerDied","Data":"b1139b7f9ab2aeb4315b8d9e517b1451893df5e7ee15e8c9e908957bb312864f"} Jun 06 10:17:43 crc kubenswrapper[4911]: I0606 10:17:43.949160 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:17:43 crc kubenswrapper[4911]: E0606 10:17:43.949678 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:17:44 crc kubenswrapper[4911]: I0606 10:17:44.909175 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ct4ph" 
event={"ID":"54d5f5e1-a17b-4298-8306-30b66f381462","Type":"ContainerStarted","Data":"a621d21971440a53d9dac1ef1184bcad83c8d2823bee655a794738913c26538b"} Jun 06 10:17:44 crc kubenswrapper[4911]: I0606 10:17:44.928434 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ct4ph" podStartSLOduration=2.4510016390000002 podStartE2EDuration="4.928409859s" podCreationTimestamp="2025-06-06 10:17:40 +0000 UTC" firstStartedPulling="2025-06-06 10:17:41.870494114 +0000 UTC m=+3873.145919657" lastFinishedPulling="2025-06-06 10:17:44.347902334 +0000 UTC m=+3875.623327877" observedRunningTime="2025-06-06 10:17:44.924894258 +0000 UTC m=+3876.200319801" watchObservedRunningTime="2025-06-06 10:17:44.928409859 +0000 UTC m=+3876.203835402" Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.014605 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4rdt8"] Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.016845 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.024405 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4rdt8"] Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.064917 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-catalog-content\") pod \"certified-operators-4rdt8\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.065277 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfjzg\" (UniqueName: \"kubernetes.io/projected/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-kube-api-access-cfjzg\") pod \"certified-operators-4rdt8\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.065458 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-utilities\") pod \"certified-operators-4rdt8\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.167271 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-utilities\") pod \"certified-operators-4rdt8\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.167409 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-catalog-content\") pod \"certified-operators-4rdt8\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.167480 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfjzg\" (UniqueName: 
\"kubernetes.io/projected/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-kube-api-access-cfjzg\") pod \"certified-operators-4rdt8\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.167801 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-catalog-content\") pod \"certified-operators-4rdt8\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.167846 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-utilities\") pod \"certified-operators-4rdt8\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.193024 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfjzg\" (UniqueName: \"kubernetes.io/projected/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-kube-api-access-cfjzg\") pod \"certified-operators-4rdt8\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:45 crc kubenswrapper[4911]: I0606 10:17:45.386713 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:46 crc kubenswrapper[4911]: I0606 10:17:46.101250 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4rdt8"] Jun 06 10:17:46 crc kubenswrapper[4911]: W0606 10:17:46.115463 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49476096_c8e5_4a37_9b5f_32c8b51ab8cb.slice/crio-7350882dffc8d53f420094643ea1475e8d0125551bc9e7155e30f6cf1a389cb9 WatchSource:0}: Error finding container 7350882dffc8d53f420094643ea1475e8d0125551bc9e7155e30f6cf1a389cb9: Status 404 returned error can't find the container with id 7350882dffc8d53f420094643ea1475e8d0125551bc9e7155e30f6cf1a389cb9 Jun 06 10:17:46 crc kubenswrapper[4911]: I0606 10:17:46.932261 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rdt8" event={"ID":"49476096-c8e5-4a37-9b5f-32c8b51ab8cb","Type":"ContainerStarted","Data":"1736365f29dc0e33755b0dea7a396731f82dcf44088378b6d7ba8f706c95ac7e"} Jun 06 10:17:46 crc kubenswrapper[4911]: I0606 10:17:46.932664 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rdt8" event={"ID":"49476096-c8e5-4a37-9b5f-32c8b51ab8cb","Type":"ContainerStarted","Data":"7350882dffc8d53f420094643ea1475e8d0125551bc9e7155e30f6cf1a389cb9"} Jun 06 10:17:47 crc kubenswrapper[4911]: I0606 10:17:47.944296 4911 generic.go:334] "Generic (PLEG): container finished" podID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" containerID="1736365f29dc0e33755b0dea7a396731f82dcf44088378b6d7ba8f706c95ac7e" exitCode=0 Jun 06 10:17:47 crc kubenswrapper[4911]: I0606 10:17:47.944349 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rdt8" event={"ID":"49476096-c8e5-4a37-9b5f-32c8b51ab8cb","Type":"ContainerDied","Data":"1736365f29dc0e33755b0dea7a396731f82dcf44088378b6d7ba8f706c95ac7e"} Jun 06 10:17:49 crc 
kubenswrapper[4911]: I0606 10:17:49.982885 4911 generic.go:334] "Generic (PLEG): container finished" podID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" containerID="e86762a831a3899c810ce6862905f874e2e0a7e94fde1d64707632f5946cea17" exitCode=0 Jun 06 10:17:49 crc kubenswrapper[4911]: I0606 10:17:49.982938 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rdt8" event={"ID":"49476096-c8e5-4a37-9b5f-32c8b51ab8cb","Type":"ContainerDied","Data":"e86762a831a3899c810ce6862905f874e2e0a7e94fde1d64707632f5946cea17"} Jun 06 10:17:50 crc kubenswrapper[4911]: I0606 10:17:50.592661 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:50 crc kubenswrapper[4911]: I0606 10:17:50.593273 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:50 crc kubenswrapper[4911]: I0606 10:17:50.648267 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:51 crc kubenswrapper[4911]: I0606 10:17:51.052600 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:52 crc kubenswrapper[4911]: I0606 10:17:52.006129 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rdt8" event={"ID":"49476096-c8e5-4a37-9b5f-32c8b51ab8cb","Type":"ContainerStarted","Data":"8f28f3227fc270358e17cf9b2e0a586da32280e30a74896172f891afd11a286c"} Jun 06 10:17:52 crc kubenswrapper[4911]: I0606 10:17:52.028719 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4rdt8" podStartSLOduration=5.054100189 podStartE2EDuration="8.028699896s" podCreationTimestamp="2025-06-06 10:17:44 +0000 UTC" firstStartedPulling="2025-06-06 10:17:47.946599234 +0000 UTC m=+3879.222024777" lastFinishedPulling="2025-06-06 10:17:50.921198901 +0000 UTC m=+3882.196624484" observedRunningTime="2025-06-06 10:17:52.02538892 +0000 UTC m=+3883.300814463" watchObservedRunningTime="2025-06-06 10:17:52.028699896 +0000 UTC m=+3883.304125439" Jun 06 10:17:52 crc kubenswrapper[4911]: I0606 10:17:52.202220 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ct4ph"] Jun 06 10:17:53 crc kubenswrapper[4911]: I0606 10:17:53.015796 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ct4ph" podUID="54d5f5e1-a17b-4298-8306-30b66f381462" containerName="registry-server" containerID="cri-o://a621d21971440a53d9dac1ef1184bcad83c8d2823bee655a794738913c26538b" gracePeriod=2 Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.029349 4911 generic.go:334] "Generic (PLEG): container finished" podID="54d5f5e1-a17b-4298-8306-30b66f381462" containerID="a621d21971440a53d9dac1ef1184bcad83c8d2823bee655a794738913c26538b" exitCode=0 Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.029393 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ct4ph" event={"ID":"54d5f5e1-a17b-4298-8306-30b66f381462","Type":"ContainerDied","Data":"a621d21971440a53d9dac1ef1184bcad83c8d2823bee655a794738913c26538b"} Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.400420 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.561764 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-catalog-content\") pod \"54d5f5e1-a17b-4298-8306-30b66f381462\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") " Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.561936 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-utilities\") pod \"54d5f5e1-a17b-4298-8306-30b66f381462\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") " Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.562069 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtll2\" (UniqueName: \"kubernetes.io/projected/54d5f5e1-a17b-4298-8306-30b66f381462-kube-api-access-dtll2\") pod \"54d5f5e1-a17b-4298-8306-30b66f381462\" (UID: \"54d5f5e1-a17b-4298-8306-30b66f381462\") " Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.562908 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-utilities" (OuterVolumeSpecName: "utilities") pod "54d5f5e1-a17b-4298-8306-30b66f381462" (UID: "54d5f5e1-a17b-4298-8306-30b66f381462"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.568842 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54d5f5e1-a17b-4298-8306-30b66f381462-kube-api-access-dtll2" (OuterVolumeSpecName: "kube-api-access-dtll2") pod "54d5f5e1-a17b-4298-8306-30b66f381462" (UID: "54d5f5e1-a17b-4298-8306-30b66f381462"). InnerVolumeSpecName "kube-api-access-dtll2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.633675 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "54d5f5e1-a17b-4298-8306-30b66f381462" (UID: "54d5f5e1-a17b-4298-8306-30b66f381462"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.664382 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.664418 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54d5f5e1-a17b-4298-8306-30b66f381462-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:17:54 crc kubenswrapper[4911]: I0606 10:17:54.664430 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtll2\" (UniqueName: \"kubernetes.io/projected/54d5f5e1-a17b-4298-8306-30b66f381462-kube-api-access-dtll2\") on node \"crc\" DevicePath \"\"" Jun 06 10:17:55 crc kubenswrapper[4911]: I0606 10:17:55.040333 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ct4ph" event={"ID":"54d5f5e1-a17b-4298-8306-30b66f381462","Type":"ContainerDied","Data":"0f19c248fe0cdc8c3e8947c9a9cbfd024200873da2fe54b07e9a079cc8f0ec6c"} Jun 06 10:17:55 crc kubenswrapper[4911]: I0606 10:17:55.040390 4911 scope.go:117] "RemoveContainer" containerID="a621d21971440a53d9dac1ef1184bcad83c8d2823bee655a794738913c26538b" Jun 06 10:17:55 crc kubenswrapper[4911]: I0606 10:17:55.040410 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ct4ph" Jun 06 10:17:55 crc kubenswrapper[4911]: I0606 10:17:55.073285 4911 scope.go:117] "RemoveContainer" containerID="b1139b7f9ab2aeb4315b8d9e517b1451893df5e7ee15e8c9e908957bb312864f" Jun 06 10:17:55 crc kubenswrapper[4911]: I0606 10:17:55.078358 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ct4ph"] Jun 06 10:17:55 crc kubenswrapper[4911]: I0606 10:17:55.085533 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ct4ph"] Jun 06 10:17:55 crc kubenswrapper[4911]: I0606 10:17:55.110022 4911 scope.go:117] "RemoveContainer" containerID="fc3e91cd1a217359a3a33cebacc190fb8cb376d5453708dcca3fe8058490af1c" Jun 06 10:17:55 crc kubenswrapper[4911]: I0606 10:17:55.387830 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:55 crc kubenswrapper[4911]: I0606 10:17:55.388327 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:55 crc kubenswrapper[4911]: I0606 10:17:55.437292 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:55 crc kubenswrapper[4911]: I0606 10:17:55.963033 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54d5f5e1-a17b-4298-8306-30b66f381462" path="/var/lib/kubelet/pods/54d5f5e1-a17b-4298-8306-30b66f381462/volumes" Jun 06 10:17:56 crc kubenswrapper[4911]: I0606 10:17:56.109783 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:17:57 crc kubenswrapper[4911]: I0606 10:17:57.602138 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4rdt8"] Jun 06 10:17:57 crc kubenswrapper[4911]: I0606 10:17:57.948694 4911 scope.go:117] "RemoveContainer" 
containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:17:58 crc kubenswrapper[4911]: I0606 10:17:58.081824 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4rdt8" podUID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" containerName="registry-server" containerID="cri-o://8f28f3227fc270358e17cf9b2e0a586da32280e30a74896172f891afd11a286c" gracePeriod=2 Jun 06 10:17:59 crc kubenswrapper[4911]: I0606 10:17:59.091976 4911 generic.go:334] "Generic (PLEG): container finished" podID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" containerID="8f28f3227fc270358e17cf9b2e0a586da32280e30a74896172f891afd11a286c" exitCode=0 Jun 06 10:17:59 crc kubenswrapper[4911]: I0606 10:17:59.092168 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rdt8" event={"ID":"49476096-c8e5-4a37-9b5f-32c8b51ab8cb","Type":"ContainerDied","Data":"8f28f3227fc270358e17cf9b2e0a586da32280e30a74896172f891afd11a286c"} Jun 06 10:17:59 crc kubenswrapper[4911]: I0606 10:17:59.095309 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"2ffdccc55218e0a3d0931171f34099cd1d7e39076107fabce4a8a019d0583152"} Jun 06 10:17:59 crc kubenswrapper[4911]: I0606 10:17:59.957556 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.072531 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-catalog-content\") pod \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.072726 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfjzg\" (UniqueName: \"kubernetes.io/projected/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-kube-api-access-cfjzg\") pod \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.073108 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-utilities\") pod \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\" (UID: \"49476096-c8e5-4a37-9b5f-32c8b51ab8cb\") " Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.077338 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-utilities" (OuterVolumeSpecName: "utilities") pod "49476096-c8e5-4a37-9b5f-32c8b51ab8cb" (UID: "49476096-c8e5-4a37-9b5f-32c8b51ab8cb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.081137 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-kube-api-access-cfjzg" (OuterVolumeSpecName: "kube-api-access-cfjzg") pod "49476096-c8e5-4a37-9b5f-32c8b51ab8cb" (UID: "49476096-c8e5-4a37-9b5f-32c8b51ab8cb"). InnerVolumeSpecName "kube-api-access-cfjzg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.113390 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "49476096-c8e5-4a37-9b5f-32c8b51ab8cb" (UID: "49476096-c8e5-4a37-9b5f-32c8b51ab8cb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.115568 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4rdt8" event={"ID":"49476096-c8e5-4a37-9b5f-32c8b51ab8cb","Type":"ContainerDied","Data":"7350882dffc8d53f420094643ea1475e8d0125551bc9e7155e30f6cf1a389cb9"} Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.115643 4911 scope.go:117] "RemoveContainer" containerID="8f28f3227fc270358e17cf9b2e0a586da32280e30a74896172f891afd11a286c" Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.115725 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4rdt8" Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.164593 4911 scope.go:117] "RemoveContainer" containerID="e86762a831a3899c810ce6862905f874e2e0a7e94fde1d64707632f5946cea17" Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.168061 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4rdt8"] Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.178320 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.178356 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.178369 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfjzg\" (UniqueName: \"kubernetes.io/projected/49476096-c8e5-4a37-9b5f-32c8b51ab8cb-kube-api-access-cfjzg\") on node \"crc\" DevicePath \"\"" Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.180393 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4rdt8"] Jun 06 10:18:00 crc kubenswrapper[4911]: I0606 10:18:00.203515 4911 scope.go:117] "RemoveContainer" containerID="1736365f29dc0e33755b0dea7a396731f82dcf44088378b6d7ba8f706c95ac7e" Jun 06 10:18:01 crc kubenswrapper[4911]: I0606 10:18:01.965722 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" path="/var/lib/kubelet/pods/49476096-c8e5-4a37-9b5f-32c8b51ab8cb/volumes" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.351692 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-rmg9b"] Jun 06 10:18:02 crc kubenswrapper[4911]: E0606 10:18:02.352556 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54d5f5e1-a17b-4298-8306-30b66f381462" containerName="extract-utilities" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.352580 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="54d5f5e1-a17b-4298-8306-30b66f381462" containerName="extract-utilities" Jun 06 10:18:02 crc kubenswrapper[4911]: 
E0606 10:18:02.352612 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" containerName="extract-utilities" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.352621 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" containerName="extract-utilities" Jun 06 10:18:02 crc kubenswrapper[4911]: E0606 10:18:02.352640 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54d5f5e1-a17b-4298-8306-30b66f381462" containerName="registry-server" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.352648 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="54d5f5e1-a17b-4298-8306-30b66f381462" containerName="registry-server" Jun 06 10:18:02 crc kubenswrapper[4911]: E0606 10:18:02.352669 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" containerName="extract-content" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.352677 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" containerName="extract-content" Jun 06 10:18:02 crc kubenswrapper[4911]: E0606 10:18:02.352690 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" containerName="registry-server" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.352700 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" containerName="registry-server" Jun 06 10:18:02 crc kubenswrapper[4911]: E0606 10:18:02.352719 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54d5f5e1-a17b-4298-8306-30b66f381462" containerName="extract-content" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.352727 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="54d5f5e1-a17b-4298-8306-30b66f381462" containerName="extract-content" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.352982 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="54d5f5e1-a17b-4298-8306-30b66f381462" containerName="registry-server" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.353002 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="49476096-c8e5-4a37-9b5f-32c8b51ab8cb" containerName="registry-server" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.353782 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-rmg9b" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.524515 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqw2c\" (UniqueName: \"kubernetes.io/projected/0ba580c8-5e36-4076-bbd3-ecff91ddb411-kube-api-access-mqw2c\") pod \"crc-debug-rmg9b\" (UID: \"0ba580c8-5e36-4076-bbd3-ecff91ddb411\") " pod="openstack/crc-debug-rmg9b" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.524600 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ba580c8-5e36-4076-bbd3-ecff91ddb411-host\") pod \"crc-debug-rmg9b\" (UID: \"0ba580c8-5e36-4076-bbd3-ecff91ddb411\") " pod="openstack/crc-debug-rmg9b" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.626890 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ba580c8-5e36-4076-bbd3-ecff91ddb411-host\") pod \"crc-debug-rmg9b\" (UID: \"0ba580c8-5e36-4076-bbd3-ecff91ddb411\") " pod="openstack/crc-debug-rmg9b" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.627077 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqw2c\" (UniqueName: \"kubernetes.io/projected/0ba580c8-5e36-4076-bbd3-ecff91ddb411-kube-api-access-mqw2c\") pod \"crc-debug-rmg9b\" (UID: \"0ba580c8-5e36-4076-bbd3-ecff91ddb411\") " pod="openstack/crc-debug-rmg9b" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.627353 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ba580c8-5e36-4076-bbd3-ecff91ddb411-host\") pod \"crc-debug-rmg9b\" (UID: \"0ba580c8-5e36-4076-bbd3-ecff91ddb411\") " pod="openstack/crc-debug-rmg9b" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.650029 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqw2c\" (UniqueName: \"kubernetes.io/projected/0ba580c8-5e36-4076-bbd3-ecff91ddb411-kube-api-access-mqw2c\") pod \"crc-debug-rmg9b\" (UID: \"0ba580c8-5e36-4076-bbd3-ecff91ddb411\") " pod="openstack/crc-debug-rmg9b" Jun 06 10:18:02 crc kubenswrapper[4911]: I0606 10:18:02.681500 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-rmg9b" Jun 06 10:18:02 crc kubenswrapper[4911]: W0606 10:18:02.721826 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ba580c8_5e36_4076_bbd3_ecff91ddb411.slice/crio-8103b2e5e77050ceedf44ccd78ed747d0c2a17a9fab036a63accf08fabdfebc5 WatchSource:0}: Error finding container 8103b2e5e77050ceedf44ccd78ed747d0c2a17a9fab036a63accf08fabdfebc5: Status 404 returned error can't find the container with id 8103b2e5e77050ceedf44ccd78ed747d0c2a17a9fab036a63accf08fabdfebc5 Jun 06 10:18:03 crc kubenswrapper[4911]: I0606 10:18:03.144382 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-rmg9b" event={"ID":"0ba580c8-5e36-4076-bbd3-ecff91ddb411","Type":"ContainerStarted","Data":"8103b2e5e77050ceedf44ccd78ed747d0c2a17a9fab036a63accf08fabdfebc5"} Jun 06 10:18:04 crc kubenswrapper[4911]: I0606 10:18:04.153526 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-rmg9b" event={"ID":"0ba580c8-5e36-4076-bbd3-ecff91ddb411","Type":"ContainerStarted","Data":"65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803"} Jun 06 10:18:04 crc kubenswrapper[4911]: I0606 10:18:04.169413 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-rmg9b" podStartSLOduration=2.169393696 podStartE2EDuration="2.169393696s" podCreationTimestamp="2025-06-06 10:18:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:18:04.168638956 +0000 UTC m=+3895.444064499" watchObservedRunningTime="2025-06-06 10:18:04.169393696 +0000 UTC m=+3895.444819239" Jun 06 10:18:13 crc kubenswrapper[4911]: I0606 10:18:13.936478 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-rmg9b"] Jun 06 10:18:13 crc kubenswrapper[4911]: I0606 10:18:13.938579 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-rmg9b" podUID="0ba580c8-5e36-4076-bbd3-ecff91ddb411" containerName="container-00" containerID="cri-o://65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803" gracePeriod=2 Jun 06 10:18:13 crc kubenswrapper[4911]: I0606 10:18:13.980025 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-rmg9b"] Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.112439 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-rmg9b" Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.199829 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqw2c\" (UniqueName: \"kubernetes.io/projected/0ba580c8-5e36-4076-bbd3-ecff91ddb411-kube-api-access-mqw2c\") pod \"0ba580c8-5e36-4076-bbd3-ecff91ddb411\" (UID: \"0ba580c8-5e36-4076-bbd3-ecff91ddb411\") " Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.200041 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ba580c8-5e36-4076-bbd3-ecff91ddb411-host\") pod \"0ba580c8-5e36-4076-bbd3-ecff91ddb411\" (UID: \"0ba580c8-5e36-4076-bbd3-ecff91ddb411\") " Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.200197 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0ba580c8-5e36-4076-bbd3-ecff91ddb411-host" (OuterVolumeSpecName: "host") pod "0ba580c8-5e36-4076-bbd3-ecff91ddb411" (UID: "0ba580c8-5e36-4076-bbd3-ecff91ddb411"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.200714 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0ba580c8-5e36-4076-bbd3-ecff91ddb411-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.205956 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ba580c8-5e36-4076-bbd3-ecff91ddb411-kube-api-access-mqw2c" (OuterVolumeSpecName: "kube-api-access-mqw2c") pod "0ba580c8-5e36-4076-bbd3-ecff91ddb411" (UID: "0ba580c8-5e36-4076-bbd3-ecff91ddb411"). InnerVolumeSpecName "kube-api-access-mqw2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.264624 4911 generic.go:334] "Generic (PLEG): container finished" podID="0ba580c8-5e36-4076-bbd3-ecff91ddb411" containerID="65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803" exitCode=0 Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.264687 4911 scope.go:117] "RemoveContainer" containerID="65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803" Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.264701 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-rmg9b" Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.293977 4911 scope.go:117] "RemoveContainer" containerID="65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803" Jun 06 10:18:14 crc kubenswrapper[4911]: E0606 10:18:14.294409 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803\": container with ID starting with 65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803 not found: ID does not exist" containerID="65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803" Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.294452 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803"} err="failed to get container status \"65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803\": rpc error: code = NotFound desc = could not find container \"65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803\": container with ID starting with 65d887bd8d88330524657ce406dc4d9af09786b8c25736a1a08e50781f71c803 not found: ID does not exist" Jun 06 10:18:14 crc kubenswrapper[4911]: I0606 10:18:14.302929 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqw2c\" (UniqueName: \"kubernetes.io/projected/0ba580c8-5e36-4076-bbd3-ecff91ddb411-kube-api-access-mqw2c\") on node \"crc\" DevicePath \"\"" Jun 06 10:18:15 crc kubenswrapper[4911]: I0606 10:18:15.958367 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ba580c8-5e36-4076-bbd3-ecff91ddb411" path="/var/lib/kubelet/pods/0ba580c8-5e36-4076-bbd3-ecff91ddb411/volumes" Jun 06 10:19:02 crc kubenswrapper[4911]: I0606 10:19:02.364674 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-6m8hd"] Jun 06 10:19:02 crc kubenswrapper[4911]: E0606 10:19:02.365578 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ba580c8-5e36-4076-bbd3-ecff91ddb411" containerName="container-00" Jun 06 10:19:02 crc kubenswrapper[4911]: I0606 10:19:02.365591 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ba580c8-5e36-4076-bbd3-ecff91ddb411" containerName="container-00" Jun 06 10:19:02 crc kubenswrapper[4911]: I0606 10:19:02.365765 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ba580c8-5e36-4076-bbd3-ecff91ddb411" containerName="container-00" Jun 06 10:19:02 crc kubenswrapper[4911]: I0606 10:19:02.366410 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-6m8hd" Jun 06 10:19:02 crc kubenswrapper[4911]: I0606 10:19:02.549032 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46230995-c289-485b-868d-b363b19886b9-host\") pod \"crc-debug-6m8hd\" (UID: \"46230995-c289-485b-868d-b363b19886b9\") " pod="openstack/crc-debug-6m8hd" Jun 06 10:19:02 crc kubenswrapper[4911]: I0606 10:19:02.549171 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb84q\" (UniqueName: \"kubernetes.io/projected/46230995-c289-485b-868d-b363b19886b9-kube-api-access-lb84q\") pod \"crc-debug-6m8hd\" (UID: \"46230995-c289-485b-868d-b363b19886b9\") " pod="openstack/crc-debug-6m8hd" Jun 06 10:19:02 crc kubenswrapper[4911]: I0606 10:19:02.650739 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46230995-c289-485b-868d-b363b19886b9-host\") pod \"crc-debug-6m8hd\" (UID: \"46230995-c289-485b-868d-b363b19886b9\") " pod="openstack/crc-debug-6m8hd" Jun 06 10:19:02 crc kubenswrapper[4911]: I0606 10:19:02.650854 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb84q\" (UniqueName: \"kubernetes.io/projected/46230995-c289-485b-868d-b363b19886b9-kube-api-access-lb84q\") pod \"crc-debug-6m8hd\" (UID: \"46230995-c289-485b-868d-b363b19886b9\") " pod="openstack/crc-debug-6m8hd" Jun 06 10:19:02 crc kubenswrapper[4911]: I0606 10:19:02.651347 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46230995-c289-485b-868d-b363b19886b9-host\") pod \"crc-debug-6m8hd\" (UID: \"46230995-c289-485b-868d-b363b19886b9\") " pod="openstack/crc-debug-6m8hd" Jun 06 10:19:02 crc kubenswrapper[4911]: I0606 10:19:02.670234 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb84q\" (UniqueName: \"kubernetes.io/projected/46230995-c289-485b-868d-b363b19886b9-kube-api-access-lb84q\") pod \"crc-debug-6m8hd\" (UID: \"46230995-c289-485b-868d-b363b19886b9\") " pod="openstack/crc-debug-6m8hd" Jun 06 10:19:02 crc kubenswrapper[4911]: I0606 10:19:02.688375 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-6m8hd" Jun 06 10:19:03 crc kubenswrapper[4911]: I0606 10:19:03.725286 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-6m8hd" event={"ID":"46230995-c289-485b-868d-b363b19886b9","Type":"ContainerStarted","Data":"c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d"} Jun 06 10:19:03 crc kubenswrapper[4911]: I0606 10:19:03.725913 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-6m8hd" event={"ID":"46230995-c289-485b-868d-b363b19886b9","Type":"ContainerStarted","Data":"a49f639c50657a5d1447a79c124abee05572d586275534ac67da170ad93d3281"} Jun 06 10:19:03 crc kubenswrapper[4911]: I0606 10:19:03.751317 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-6m8hd" podStartSLOduration=1.751291342 podStartE2EDuration="1.751291342s" podCreationTimestamp="2025-06-06 10:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:19:03.739296022 +0000 UTC m=+3955.014721575" watchObservedRunningTime="2025-06-06 10:19:03.751291342 +0000 UTC m=+3955.026716885" Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.361308 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-6m8hd"] Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.362002 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-6m8hd" podUID="46230995-c289-485b-868d-b363b19886b9" containerName="container-00" containerID="cri-o://c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d" gracePeriod=2 Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.370538 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-6m8hd"] Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.615006 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-6m8hd" Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.796518 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46230995-c289-485b-868d-b363b19886b9-host\") pod \"46230995-c289-485b-868d-b363b19886b9\" (UID: \"46230995-c289-485b-868d-b363b19886b9\") " Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.796592 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46230995-c289-485b-868d-b363b19886b9-host" (OuterVolumeSpecName: "host") pod "46230995-c289-485b-868d-b363b19886b9" (UID: "46230995-c289-485b-868d-b363b19886b9"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.796603 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb84q\" (UniqueName: \"kubernetes.io/projected/46230995-c289-485b-868d-b363b19886b9-kube-api-access-lb84q\") pod \"46230995-c289-485b-868d-b363b19886b9\" (UID: \"46230995-c289-485b-868d-b363b19886b9\") " Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.797394 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/46230995-c289-485b-868d-b363b19886b9-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.828008 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46230995-c289-485b-868d-b363b19886b9-kube-api-access-lb84q" (OuterVolumeSpecName: "kube-api-access-lb84q") pod "46230995-c289-485b-868d-b363b19886b9" (UID: "46230995-c289-485b-868d-b363b19886b9"). InnerVolumeSpecName "kube-api-access-lb84q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.832901 4911 generic.go:334] "Generic (PLEG): container finished" podID="46230995-c289-485b-868d-b363b19886b9" containerID="c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d" exitCode=0 Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.832966 4911 scope.go:117] "RemoveContainer" containerID="c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d" Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.832989 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-6m8hd" Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.875331 4911 scope.go:117] "RemoveContainer" containerID="c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d" Jun 06 10:19:13 crc kubenswrapper[4911]: E0606 10:19:13.875923 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d\": container with ID starting with c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d not found: ID does not exist" containerID="c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d" Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.875974 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d"} err="failed to get container status \"c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d\": rpc error: code = NotFound desc = could not find container \"c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d\": container with ID starting with c840a07f4c3581e730d52c8caaeb8b0d25cc6911ffc83d82fb55e76e42cf704d not found: ID does not exist" Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.899528 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb84q\" (UniqueName: \"kubernetes.io/projected/46230995-c289-485b-868d-b363b19886b9-kube-api-access-lb84q\") on node \"crc\" DevicePath \"\"" Jun 06 10:19:13 crc kubenswrapper[4911]: I0606 10:19:13.959726 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46230995-c289-485b-868d-b363b19886b9" path="/var/lib/kubelet/pods/46230995-c289-485b-868d-b363b19886b9/volumes" Jun 06 10:19:19 crc 
kubenswrapper[4911]: I0606 10:19:19.797796 4911 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-59f44bc869-n7dl7" podUID="72e5a926-1c68-4e9b-9240-44c27d488e36" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Jun 06 10:20:01 crc kubenswrapper[4911]: I0606 10:20:01.835647 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-lt4tf"] Jun 06 10:20:01 crc kubenswrapper[4911]: E0606 10:20:01.837169 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46230995-c289-485b-868d-b363b19886b9" containerName="container-00" Jun 06 10:20:01 crc kubenswrapper[4911]: I0606 10:20:01.837188 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="46230995-c289-485b-868d-b363b19886b9" containerName="container-00" Jun 06 10:20:01 crc kubenswrapper[4911]: I0606 10:20:01.837400 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="46230995-c289-485b-868d-b363b19886b9" containerName="container-00" Jun 06 10:20:01 crc kubenswrapper[4911]: I0606 10:20:01.838032 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-lt4tf" Jun 06 10:20:01 crc kubenswrapper[4911]: I0606 10:20:01.906985 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct7f7\" (UniqueName: \"kubernetes.io/projected/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-kube-api-access-ct7f7\") pod \"crc-debug-lt4tf\" (UID: \"1eed01f9-c4c4-4f47-aaf6-19d849d80d48\") " pod="openstack/crc-debug-lt4tf" Jun 06 10:20:01 crc kubenswrapper[4911]: I0606 10:20:01.907297 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-host\") pod \"crc-debug-lt4tf\" (UID: \"1eed01f9-c4c4-4f47-aaf6-19d849d80d48\") " pod="openstack/crc-debug-lt4tf" Jun 06 10:20:02 crc kubenswrapper[4911]: I0606 10:20:02.009137 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-host\") pod \"crc-debug-lt4tf\" (UID: \"1eed01f9-c4c4-4f47-aaf6-19d849d80d48\") " pod="openstack/crc-debug-lt4tf" Jun 06 10:20:02 crc kubenswrapper[4911]: I0606 10:20:02.009372 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct7f7\" (UniqueName: \"kubernetes.io/projected/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-kube-api-access-ct7f7\") pod \"crc-debug-lt4tf\" (UID: \"1eed01f9-c4c4-4f47-aaf6-19d849d80d48\") " pod="openstack/crc-debug-lt4tf" Jun 06 10:20:02 crc kubenswrapper[4911]: I0606 10:20:02.009743 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-host\") pod \"crc-debug-lt4tf\" (UID: \"1eed01f9-c4c4-4f47-aaf6-19d849d80d48\") " pod="openstack/crc-debug-lt4tf" Jun 06 10:20:02 crc kubenswrapper[4911]: I0606 10:20:02.238338 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct7f7\" (UniqueName: \"kubernetes.io/projected/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-kube-api-access-ct7f7\") pod \"crc-debug-lt4tf\" (UID: \"1eed01f9-c4c4-4f47-aaf6-19d849d80d48\") " pod="openstack/crc-debug-lt4tf" Jun 06 10:20:02 crc kubenswrapper[4911]: I0606 10:20:02.477360 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-lt4tf" Jun 06 10:20:03 crc kubenswrapper[4911]: I0606 10:20:03.278302 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-lt4tf" event={"ID":"1eed01f9-c4c4-4f47-aaf6-19d849d80d48","Type":"ContainerStarted","Data":"68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036"} Jun 06 10:20:03 crc kubenswrapper[4911]: I0606 10:20:03.278652 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-lt4tf" event={"ID":"1eed01f9-c4c4-4f47-aaf6-19d849d80d48","Type":"ContainerStarted","Data":"2d2ed939887519a1d86ab8b2d66718b2e430e682f29052c3b7c1b3eee858b91e"} Jun 06 10:20:03 crc kubenswrapper[4911]: I0606 10:20:03.295235 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-lt4tf" podStartSLOduration=2.295209404 podStartE2EDuration="2.295209404s" podCreationTimestamp="2025-06-06 10:20:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:20:03.291804186 +0000 UTC m=+4014.567229729" watchObservedRunningTime="2025-06-06 10:20:03.295209404 +0000 UTC m=+4014.570634937" Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.120200 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-lt4tf"] Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.121327 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-lt4tf" podUID="1eed01f9-c4c4-4f47-aaf6-19d849d80d48" containerName="container-00" containerID="cri-o://68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036" gracePeriod=2 Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.127889 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-lt4tf"] Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.214414 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-lt4tf" Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.353888 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ct7f7\" (UniqueName: \"kubernetes.io/projected/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-kube-api-access-ct7f7\") pod \"1eed01f9-c4c4-4f47-aaf6-19d849d80d48\" (UID: \"1eed01f9-c4c4-4f47-aaf6-19d849d80d48\") " Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.354315 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-host\") pod \"1eed01f9-c4c4-4f47-aaf6-19d849d80d48\" (UID: \"1eed01f9-c4c4-4f47-aaf6-19d849d80d48\") " Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.354467 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-host" (OuterVolumeSpecName: "host") pod "1eed01f9-c4c4-4f47-aaf6-19d849d80d48" (UID: "1eed01f9-c4c4-4f47-aaf6-19d849d80d48"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.355058 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.362049 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-kube-api-access-ct7f7" (OuterVolumeSpecName: "kube-api-access-ct7f7") pod "1eed01f9-c4c4-4f47-aaf6-19d849d80d48" (UID: "1eed01f9-c4c4-4f47-aaf6-19d849d80d48"). InnerVolumeSpecName "kube-api-access-ct7f7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.370910 4911 generic.go:334] "Generic (PLEG): container finished" podID="1eed01f9-c4c4-4f47-aaf6-19d849d80d48" containerID="68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036" exitCode=0 Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.370994 4911 scope.go:117] "RemoveContainer" containerID="68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036" Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.370999 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-lt4tf" Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.453045 4911 scope.go:117] "RemoveContainer" containerID="68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036" Jun 06 10:20:13 crc kubenswrapper[4911]: E0606 10:20:13.453782 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036\": container with ID starting with 68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036 not found: ID does not exist" containerID="68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036" Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.453867 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036"} err="failed to get container status \"68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036\": rpc error: code = NotFound desc = could not find container \"68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036\": container with ID starting with 68309d3610addb1a5b9a5c03594e29cf275a0eaf5f3c3795002eb63fba410036 not found: ID does not exist" Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.457312 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ct7f7\" (UniqueName: \"kubernetes.io/projected/1eed01f9-c4c4-4f47-aaf6-19d849d80d48-kube-api-access-ct7f7\") on node \"crc\" DevicePath \"\"" Jun 06 10:20:13 crc kubenswrapper[4911]: I0606 10:20:13.960841 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eed01f9-c4c4-4f47-aaf6-19d849d80d48" path="/var/lib/kubelet/pods/1eed01f9-c4c4-4f47-aaf6-19d849d80d48/volumes" Jun 06 10:20:24 crc kubenswrapper[4911]: I0606 10:20:24.299703 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:20:24 crc kubenswrapper[4911]: I0606 
10:20:24.300138 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:20:54 crc kubenswrapper[4911]: I0606 10:20:54.300564 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:20:54 crc kubenswrapper[4911]: I0606 10:20:54.301419 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:21:01 crc kubenswrapper[4911]: I0606 10:21:01.566550 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-gd99p"] Jun 06 10:21:01 crc kubenswrapper[4911]: E0606 10:21:01.567451 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eed01f9-c4c4-4f47-aaf6-19d849d80d48" containerName="container-00" Jun 06 10:21:01 crc kubenswrapper[4911]: I0606 10:21:01.567464 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eed01f9-c4c4-4f47-aaf6-19d849d80d48" containerName="container-00" Jun 06 10:21:01 crc kubenswrapper[4911]: I0606 10:21:01.567683 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eed01f9-c4c4-4f47-aaf6-19d849d80d48" containerName="container-00" Jun 06 10:21:01 crc kubenswrapper[4911]: I0606 10:21:01.568337 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-gd99p" Jun 06 10:21:01 crc kubenswrapper[4911]: I0606 10:21:01.717609 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgskw\" (UniqueName: \"kubernetes.io/projected/c487f81c-b669-44df-8e50-4cd6ddcf440e-kube-api-access-kgskw\") pod \"crc-debug-gd99p\" (UID: \"c487f81c-b669-44df-8e50-4cd6ddcf440e\") " pod="openstack/crc-debug-gd99p" Jun 06 10:21:01 crc kubenswrapper[4911]: I0606 10:21:01.718686 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c487f81c-b669-44df-8e50-4cd6ddcf440e-host\") pod \"crc-debug-gd99p\" (UID: \"c487f81c-b669-44df-8e50-4cd6ddcf440e\") " pod="openstack/crc-debug-gd99p" Jun 06 10:21:01 crc kubenswrapper[4911]: I0606 10:21:01.821032 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgskw\" (UniqueName: \"kubernetes.io/projected/c487f81c-b669-44df-8e50-4cd6ddcf440e-kube-api-access-kgskw\") pod \"crc-debug-gd99p\" (UID: \"c487f81c-b669-44df-8e50-4cd6ddcf440e\") " pod="openstack/crc-debug-gd99p" Jun 06 10:21:01 crc kubenswrapper[4911]: I0606 10:21:01.821179 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c487f81c-b669-44df-8e50-4cd6ddcf440e-host\") pod \"crc-debug-gd99p\" (UID: \"c487f81c-b669-44df-8e50-4cd6ddcf440e\") " pod="openstack/crc-debug-gd99p" Jun 06 10:21:01 crc kubenswrapper[4911]: I0606 10:21:01.821300 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c487f81c-b669-44df-8e50-4cd6ddcf440e-host\") pod \"crc-debug-gd99p\" (UID: \"c487f81c-b669-44df-8e50-4cd6ddcf440e\") " pod="openstack/crc-debug-gd99p" Jun 06 10:21:01 crc kubenswrapper[4911]: I0606 10:21:01.844840 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgskw\" (UniqueName: \"kubernetes.io/projected/c487f81c-b669-44df-8e50-4cd6ddcf440e-kube-api-access-kgskw\") pod \"crc-debug-gd99p\" (UID: \"c487f81c-b669-44df-8e50-4cd6ddcf440e\") " pod="openstack/crc-debug-gd99p" Jun 06 10:21:01 crc kubenswrapper[4911]: I0606 10:21:01.896294 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-gd99p" Jun 06 10:21:02 crc kubenswrapper[4911]: I0606 10:21:02.816538 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-gd99p" event={"ID":"c487f81c-b669-44df-8e50-4cd6ddcf440e","Type":"ContainerStarted","Data":"b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b"} Jun 06 10:21:02 crc kubenswrapper[4911]: I0606 10:21:02.816895 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-gd99p" event={"ID":"c487f81c-b669-44df-8e50-4cd6ddcf440e","Type":"ContainerStarted","Data":"d93d41fff7bcb7f5ac9f5f8da7a2fd50f1864687cb5ec675430078a5f4de56e9"} Jun 06 10:21:02 crc kubenswrapper[4911]: I0606 10:21:02.831813 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-gd99p" podStartSLOduration=1.831800656 podStartE2EDuration="1.831800656s" podCreationTimestamp="2025-06-06 10:21:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:21:02.828515151 +0000 UTC m=+4074.103940694" watchObservedRunningTime="2025-06-06 10:21:02.831800656 +0000 UTC m=+4074.107226209" Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.500022 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-gd99p"] Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.500960 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-gd99p" podUID="c487f81c-b669-44df-8e50-4cd6ddcf440e" containerName="container-00" containerID="cri-o://b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b" gracePeriod=2 Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.512181 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-gd99p"] Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.610753 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-gd99p" Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.649936 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgskw\" (UniqueName: \"kubernetes.io/projected/c487f81c-b669-44df-8e50-4cd6ddcf440e-kube-api-access-kgskw\") pod \"c487f81c-b669-44df-8e50-4cd6ddcf440e\" (UID: \"c487f81c-b669-44df-8e50-4cd6ddcf440e\") " Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.650008 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c487f81c-b669-44df-8e50-4cd6ddcf440e-host\") pod \"c487f81c-b669-44df-8e50-4cd6ddcf440e\" (UID: \"c487f81c-b669-44df-8e50-4cd6ddcf440e\") " Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.650171 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c487f81c-b669-44df-8e50-4cd6ddcf440e-host" (OuterVolumeSpecName: "host") pod "c487f81c-b669-44df-8e50-4cd6ddcf440e" (UID: "c487f81c-b669-44df-8e50-4cd6ddcf440e"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.650630 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c487f81c-b669-44df-8e50-4cd6ddcf440e-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.655226 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c487f81c-b669-44df-8e50-4cd6ddcf440e-kube-api-access-kgskw" (OuterVolumeSpecName: "kube-api-access-kgskw") pod "c487f81c-b669-44df-8e50-4cd6ddcf440e" (UID: "c487f81c-b669-44df-8e50-4cd6ddcf440e"). InnerVolumeSpecName "kube-api-access-kgskw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.752915 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgskw\" (UniqueName: \"kubernetes.io/projected/c487f81c-b669-44df-8e50-4cd6ddcf440e-kube-api-access-kgskw\") on node \"crc\" DevicePath \"\"" Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.924946 4911 generic.go:334] "Generic (PLEG): container finished" podID="c487f81c-b669-44df-8e50-4cd6ddcf440e" containerID="b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b" exitCode=0 Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.925033 4911 scope.go:117] "RemoveContainer" containerID="b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b" Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.925599 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-gd99p" Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.959021 4911 scope.go:117] "RemoveContainer" containerID="b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b" Jun 06 10:21:12 crc kubenswrapper[4911]: E0606 10:21:12.959802 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b\": container with ID starting with b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b not found: ID does not exist" containerID="b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b" Jun 06 10:21:12 crc kubenswrapper[4911]: I0606 10:21:12.959853 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b"} err="failed to get container status \"b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b\": rpc error: code = NotFound desc = could not find container \"b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b\": container with ID starting with b4e8cb3a93a0b3e9210c45c4c5beba5e61776a3c413cbb8aacb70f21e949143b not found: ID does not exist" Jun 06 10:21:13 crc kubenswrapper[4911]: I0606 10:21:13.960646 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c487f81c-b669-44df-8e50-4cd6ddcf440e" path="/var/lib/kubelet/pods/c487f81c-b669-44df-8e50-4cd6ddcf440e/volumes" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.411375 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xc6bt"] Jun 06 10:21:21 crc kubenswrapper[4911]: E0606 10:21:21.412372 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c487f81c-b669-44df-8e50-4cd6ddcf440e" containerName="container-00" Jun 06 10:21:21 crc 
kubenswrapper[4911]: I0606 10:21:21.412387 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c487f81c-b669-44df-8e50-4cd6ddcf440e" containerName="container-00" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.412580 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c487f81c-b669-44df-8e50-4cd6ddcf440e" containerName="container-00" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.414151 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.420971 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xc6bt"] Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.455219 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsjr5\" (UniqueName: \"kubernetes.io/projected/7d77ec01-5ee9-4785-bab0-41a2e1af534d-kube-api-access-dsjr5\") pod \"redhat-marketplace-xc6bt\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.455341 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-catalog-content\") pod \"redhat-marketplace-xc6bt\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.455377 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-utilities\") pod \"redhat-marketplace-xc6bt\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.556550 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsjr5\" (UniqueName: \"kubernetes.io/projected/7d77ec01-5ee9-4785-bab0-41a2e1af534d-kube-api-access-dsjr5\") pod \"redhat-marketplace-xc6bt\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.556843 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-catalog-content\") pod \"redhat-marketplace-xc6bt\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.556965 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-utilities\") pod \"redhat-marketplace-xc6bt\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.557494 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-utilities\") pod \"redhat-marketplace-xc6bt\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:21 crc 
kubenswrapper[4911]: I0606 10:21:21.557571 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-catalog-content\") pod \"redhat-marketplace-xc6bt\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.587901 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsjr5\" (UniqueName: \"kubernetes.io/projected/7d77ec01-5ee9-4785-bab0-41a2e1af534d-kube-api-access-dsjr5\") pod \"redhat-marketplace-xc6bt\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:21 crc kubenswrapper[4911]: I0606 10:21:21.779300 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:22 crc kubenswrapper[4911]: I0606 10:21:22.427785 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xc6bt"] Jun 06 10:21:23 crc kubenswrapper[4911]: I0606 10:21:23.020647 4911 generic.go:334] "Generic (PLEG): container finished" podID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" containerID="0dd16bf17046e4c9036e5b276673fdf5e470df5b060e4fc00b6ce5a2cc7ad510" exitCode=0 Jun 06 10:21:23 crc kubenswrapper[4911]: I0606 10:21:23.020999 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xc6bt" event={"ID":"7d77ec01-5ee9-4785-bab0-41a2e1af534d","Type":"ContainerDied","Data":"0dd16bf17046e4c9036e5b276673fdf5e470df5b060e4fc00b6ce5a2cc7ad510"} Jun 06 10:21:23 crc kubenswrapper[4911]: I0606 10:21:23.021028 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xc6bt" event={"ID":"7d77ec01-5ee9-4785-bab0-41a2e1af534d","Type":"ContainerStarted","Data":"7e3d45ec656b21b3a2c2fe808b10c442ab721fdd14dd8c8c891a9858c174ad02"} Jun 06 10:21:23 crc kubenswrapper[4911]: I0606 10:21:23.022814 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 10:21:24 crc kubenswrapper[4911]: I0606 10:21:24.033696 4911 generic.go:334] "Generic (PLEG): container finished" podID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" containerID="509f0a96814562b911c22a0cc836aeaa7a0aef7df135963b57e5240ec96d17cf" exitCode=0 Jun 06 10:21:24 crc kubenswrapper[4911]: I0606 10:21:24.034228 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xc6bt" event={"ID":"7d77ec01-5ee9-4785-bab0-41a2e1af534d","Type":"ContainerDied","Data":"509f0a96814562b911c22a0cc836aeaa7a0aef7df135963b57e5240ec96d17cf"} Jun 06 10:21:24 crc kubenswrapper[4911]: I0606 10:21:24.299824 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:21:24 crc kubenswrapper[4911]: I0606 10:21:24.300100 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:21:24 crc 
kubenswrapper[4911]: I0606 10:21:24.300143 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 10:21:24 crc kubenswrapper[4911]: I0606 10:21:24.300847 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2ffdccc55218e0a3d0931171f34099cd1d7e39076107fabce4a8a019d0583152"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 10:21:24 crc kubenswrapper[4911]: I0606 10:21:24.300905 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://2ffdccc55218e0a3d0931171f34099cd1d7e39076107fabce4a8a019d0583152" gracePeriod=600 Jun 06 10:21:25 crc kubenswrapper[4911]: I0606 10:21:25.044546 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="2ffdccc55218e0a3d0931171f34099cd1d7e39076107fabce4a8a019d0583152" exitCode=0 Jun 06 10:21:25 crc kubenswrapper[4911]: I0606 10:21:25.044765 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"2ffdccc55218e0a3d0931171f34099cd1d7e39076107fabce4a8a019d0583152"} Jun 06 10:21:25 crc kubenswrapper[4911]: I0606 10:21:25.044975 4911 scope.go:117] "RemoveContainer" containerID="b06fdf1e95bcd308c3427b8bab0513ed8d53ed39b0402fd7bcb0e7fdb3ef7bbb" Jun 06 10:21:26 crc kubenswrapper[4911]: I0606 10:21:26.057475 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112"} Jun 06 10:21:26 crc kubenswrapper[4911]: I0606 10:21:26.060810 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xc6bt" event={"ID":"7d77ec01-5ee9-4785-bab0-41a2e1af534d","Type":"ContainerStarted","Data":"e4cf0aa3066b170ccbe8719a31a92b50372dcf33f582d8369d5b5987fbbf8a07"} Jun 06 10:21:26 crc kubenswrapper[4911]: I0606 10:21:26.103018 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xc6bt" podStartSLOduration=2.503750014 podStartE2EDuration="5.102996571s" podCreationTimestamp="2025-06-06 10:21:21 +0000 UTC" firstStartedPulling="2025-06-06 10:21:23.022535731 +0000 UTC m=+4094.297961274" lastFinishedPulling="2025-06-06 10:21:25.621782288 +0000 UTC m=+4096.897207831" observedRunningTime="2025-06-06 10:21:26.097588302 +0000 UTC m=+4097.373013835" watchObservedRunningTime="2025-06-06 10:21:26.102996571 +0000 UTC m=+4097.378422114" Jun 06 10:21:31 crc kubenswrapper[4911]: I0606 10:21:31.779521 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:31 crc kubenswrapper[4911]: I0606 10:21:31.781473 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:31 crc kubenswrapper[4911]: I0606 10:21:31.831894 4911 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:32 crc kubenswrapper[4911]: I0606 10:21:32.168822 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:32 crc kubenswrapper[4911]: I0606 10:21:32.223755 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xc6bt"] Jun 06 10:21:34 crc kubenswrapper[4911]: I0606 10:21:34.140653 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xc6bt" podUID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" containerName="registry-server" containerID="cri-o://e4cf0aa3066b170ccbe8719a31a92b50372dcf33f582d8369d5b5987fbbf8a07" gracePeriod=2 Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.159797 4911 generic.go:334] "Generic (PLEG): container finished" podID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" containerID="e4cf0aa3066b170ccbe8719a31a92b50372dcf33f582d8369d5b5987fbbf8a07" exitCode=0 Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.159889 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xc6bt" event={"ID":"7d77ec01-5ee9-4785-bab0-41a2e1af534d","Type":"ContainerDied","Data":"e4cf0aa3066b170ccbe8719a31a92b50372dcf33f582d8369d5b5987fbbf8a07"} Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.604024 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.728635 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsjr5\" (UniqueName: \"kubernetes.io/projected/7d77ec01-5ee9-4785-bab0-41a2e1af534d-kube-api-access-dsjr5\") pod \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.729174 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-utilities\") pod \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.729384 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-catalog-content\") pod \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\" (UID: \"7d77ec01-5ee9-4785-bab0-41a2e1af534d\") " Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.731250 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-utilities" (OuterVolumeSpecName: "utilities") pod "7d77ec01-5ee9-4785-bab0-41a2e1af534d" (UID: "7d77ec01-5ee9-4785-bab0-41a2e1af534d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.739470 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d77ec01-5ee9-4785-bab0-41a2e1af534d-kube-api-access-dsjr5" (OuterVolumeSpecName: "kube-api-access-dsjr5") pod "7d77ec01-5ee9-4785-bab0-41a2e1af534d" (UID: "7d77ec01-5ee9-4785-bab0-41a2e1af534d"). InnerVolumeSpecName "kube-api-access-dsjr5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.746256 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7d77ec01-5ee9-4785-bab0-41a2e1af534d" (UID: "7d77ec01-5ee9-4785-bab0-41a2e1af534d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.832082 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsjr5\" (UniqueName: \"kubernetes.io/projected/7d77ec01-5ee9-4785-bab0-41a2e1af534d-kube-api-access-dsjr5\") on node \"crc\" DevicePath \"\"" Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.832780 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:21:35 crc kubenswrapper[4911]: I0606 10:21:35.832897 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d77ec01-5ee9-4785-bab0-41a2e1af534d-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:21:36 crc kubenswrapper[4911]: I0606 10:21:36.204889 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xc6bt" Jun 06 10:21:36 crc kubenswrapper[4911]: I0606 10:21:36.204931 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xc6bt" event={"ID":"7d77ec01-5ee9-4785-bab0-41a2e1af534d","Type":"ContainerDied","Data":"7e3d45ec656b21b3a2c2fe808b10c442ab721fdd14dd8c8c891a9858c174ad02"} Jun 06 10:21:36 crc kubenswrapper[4911]: I0606 10:21:36.205033 4911 scope.go:117] "RemoveContainer" containerID="e4cf0aa3066b170ccbe8719a31a92b50372dcf33f582d8369d5b5987fbbf8a07" Jun 06 10:21:36 crc kubenswrapper[4911]: I0606 10:21:36.231417 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xc6bt"] Jun 06 10:21:36 crc kubenswrapper[4911]: I0606 10:21:36.234036 4911 scope.go:117] "RemoveContainer" containerID="509f0a96814562b911c22a0cc836aeaa7a0aef7df135963b57e5240ec96d17cf" Jun 06 10:21:36 crc kubenswrapper[4911]: I0606 10:21:36.239288 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xc6bt"] Jun 06 10:21:36 crc kubenswrapper[4911]: I0606 10:21:36.260802 4911 scope.go:117] "RemoveContainer" containerID="0dd16bf17046e4c9036e5b276673fdf5e470df5b060e4fc00b6ce5a2cc7ad510" Jun 06 10:21:37 crc kubenswrapper[4911]: I0606 10:21:37.958670 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" path="/var/lib/kubelet/pods/7d77ec01-5ee9-4785-bab0-41a2e1af534d/volumes" Jun 06 10:22:01 crc kubenswrapper[4911]: I0606 10:22:01.910602 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-btb9l"] Jun 06 10:22:01 crc kubenswrapper[4911]: E0606 10:22:01.911985 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" containerName="registry-server" Jun 06 10:22:01 crc kubenswrapper[4911]: I0606 10:22:01.912006 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" containerName="registry-server" Jun 06 10:22:01 crc kubenswrapper[4911]: E0606 
10:22:01.912024 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" containerName="extract-content" Jun 06 10:22:01 crc kubenswrapper[4911]: I0606 10:22:01.912033 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" containerName="extract-content" Jun 06 10:22:01 crc kubenswrapper[4911]: E0606 10:22:01.912051 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" containerName="extract-utilities" Jun 06 10:22:01 crc kubenswrapper[4911]: I0606 10:22:01.912060 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" containerName="extract-utilities" Jun 06 10:22:01 crc kubenswrapper[4911]: I0606 10:22:01.912313 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d77ec01-5ee9-4785-bab0-41a2e1af534d" containerName="registry-server" Jun 06 10:22:01 crc kubenswrapper[4911]: I0606 10:22:01.913210 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-btb9l" Jun 06 10:22:02 crc kubenswrapper[4911]: I0606 10:22:02.102048 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-host\") pod \"crc-debug-btb9l\" (UID: \"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c\") " pod="openstack/crc-debug-btb9l" Jun 06 10:22:02 crc kubenswrapper[4911]: I0606 10:22:02.102471 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4jbz\" (UniqueName: \"kubernetes.io/projected/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-kube-api-access-k4jbz\") pod \"crc-debug-btb9l\" (UID: \"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c\") " pod="openstack/crc-debug-btb9l" Jun 06 10:22:02 crc kubenswrapper[4911]: I0606 10:22:02.204112 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4jbz\" (UniqueName: \"kubernetes.io/projected/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-kube-api-access-k4jbz\") pod \"crc-debug-btb9l\" (UID: \"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c\") " pod="openstack/crc-debug-btb9l" Jun 06 10:22:02 crc kubenswrapper[4911]: I0606 10:22:02.204283 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-host\") pod \"crc-debug-btb9l\" (UID: \"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c\") " pod="openstack/crc-debug-btb9l" Jun 06 10:22:02 crc kubenswrapper[4911]: I0606 10:22:02.204435 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-host\") pod \"crc-debug-btb9l\" (UID: \"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c\") " pod="openstack/crc-debug-btb9l" Jun 06 10:22:02 crc kubenswrapper[4911]: I0606 10:22:02.544678 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4jbz\" (UniqueName: \"kubernetes.io/projected/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-kube-api-access-k4jbz\") pod \"crc-debug-btb9l\" (UID: \"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c\") " pod="openstack/crc-debug-btb9l" Jun 06 10:22:02 crc kubenswrapper[4911]: I0606 10:22:02.842921 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-btb9l" Jun 06 10:22:03 crc kubenswrapper[4911]: I0606 10:22:03.476046 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-btb9l" event={"ID":"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c","Type":"ContainerStarted","Data":"610cb605c0cee20eeb816d2c5c30a2c649b701133c63c87f96c4a052a8ca24dc"} Jun 06 10:22:04 crc kubenswrapper[4911]: I0606 10:22:04.487079 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-btb9l" event={"ID":"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c","Type":"ContainerStarted","Data":"dcb43cdd4d1a7dfcb7586faca260b51ea75379e5a9f84853bbbfee1058396429"} Jun 06 10:22:04 crc kubenswrapper[4911]: I0606 10:22:04.505734 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-btb9l" podStartSLOduration=3.5057139250000002 podStartE2EDuration="3.505713925s" podCreationTimestamp="2025-06-06 10:22:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:22:04.500921872 +0000 UTC m=+4135.776347425" watchObservedRunningTime="2025-06-06 10:22:04.505713925 +0000 UTC m=+4135.781139468" Jun 06 10:22:13 crc kubenswrapper[4911]: I0606 10:22:13.973518 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-btb9l"] Jun 06 10:22:13 crc kubenswrapper[4911]: I0606 10:22:13.974315 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-btb9l" podUID="33bbc3f7-3253-43af-9e3e-d4f3a52dad0c" containerName="container-00" containerID="cri-o://dcb43cdd4d1a7dfcb7586faca260b51ea75379e5a9f84853bbbfee1058396429" gracePeriod=2 Jun 06 10:22:13 crc kubenswrapper[4911]: I0606 10:22:13.983969 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-btb9l"] Jun 06 10:22:14 crc kubenswrapper[4911]: I0606 10:22:14.581689 4911 generic.go:334] "Generic (PLEG): container finished" podID="33bbc3f7-3253-43af-9e3e-d4f3a52dad0c" containerID="dcb43cdd4d1a7dfcb7586faca260b51ea75379e5a9f84853bbbfee1058396429" exitCode=0 Jun 06 10:22:14 crc kubenswrapper[4911]: I0606 10:22:14.582152 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="610cb605c0cee20eeb816d2c5c30a2c649b701133c63c87f96c4a052a8ca24dc" Jun 06 10:22:14 crc kubenswrapper[4911]: I0606 10:22:14.614170 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-btb9l" Jun 06 10:22:14 crc kubenswrapper[4911]: I0606 10:22:14.770045 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-host\") pod \"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c\" (UID: \"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c\") " Jun 06 10:22:14 crc kubenswrapper[4911]: I0606 10:22:14.770201 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4jbz\" (UniqueName: \"kubernetes.io/projected/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-kube-api-access-k4jbz\") pod \"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c\" (UID: \"33bbc3f7-3253-43af-9e3e-d4f3a52dad0c\") " Jun 06 10:22:14 crc kubenswrapper[4911]: I0606 10:22:14.770231 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-host" (OuterVolumeSpecName: "host") pod "33bbc3f7-3253-43af-9e3e-d4f3a52dad0c" (UID: "33bbc3f7-3253-43af-9e3e-d4f3a52dad0c"). 
InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:22:14 crc kubenswrapper[4911]: I0606 10:22:14.770644 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:22:14 crc kubenswrapper[4911]: I0606 10:22:14.776080 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-kube-api-access-k4jbz" (OuterVolumeSpecName: "kube-api-access-k4jbz") pod "33bbc3f7-3253-43af-9e3e-d4f3a52dad0c" (UID: "33bbc3f7-3253-43af-9e3e-d4f3a52dad0c"). InnerVolumeSpecName "kube-api-access-k4jbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:22:14 crc kubenswrapper[4911]: I0606 10:22:14.872330 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4jbz\" (UniqueName: \"kubernetes.io/projected/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c-kube-api-access-k4jbz\") on node \"crc\" DevicePath \"\"" Jun 06 10:22:15 crc kubenswrapper[4911]: I0606 10:22:15.590060 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-btb9l" Jun 06 10:22:15 crc kubenswrapper[4911]: I0606 10:22:15.959652 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33bbc3f7-3253-43af-9e3e-d4f3a52dad0c" path="/var/lib/kubelet/pods/33bbc3f7-3253-43af-9e3e-d4f3a52dad0c/volumes" Jun 06 10:23:02 crc kubenswrapper[4911]: I0606 10:23:02.379406 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-5rvrg"] Jun 06 10:23:02 crc kubenswrapper[4911]: E0606 10:23:02.380446 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33bbc3f7-3253-43af-9e3e-d4f3a52dad0c" containerName="container-00" Jun 06 10:23:02 crc kubenswrapper[4911]: I0606 10:23:02.380466 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="33bbc3f7-3253-43af-9e3e-d4f3a52dad0c" containerName="container-00" Jun 06 10:23:02 crc kubenswrapper[4911]: I0606 10:23:02.380737 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="33bbc3f7-3253-43af-9e3e-d4f3a52dad0c" containerName="container-00" Jun 06 10:23:02 crc kubenswrapper[4911]: I0606 10:23:02.381768 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-5rvrg" Jun 06 10:23:02 crc kubenswrapper[4911]: I0606 10:23:02.444825 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-host\") pod \"crc-debug-5rvrg\" (UID: \"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb\") " pod="openstack/crc-debug-5rvrg" Jun 06 10:23:02 crc kubenswrapper[4911]: I0606 10:23:02.444889 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzfn6\" (UniqueName: \"kubernetes.io/projected/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-kube-api-access-gzfn6\") pod \"crc-debug-5rvrg\" (UID: \"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb\") " pod="openstack/crc-debug-5rvrg" Jun 06 10:23:02 crc kubenswrapper[4911]: I0606 10:23:02.547403 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-host\") pod \"crc-debug-5rvrg\" (UID: \"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb\") " pod="openstack/crc-debug-5rvrg" Jun 06 10:23:02 crc kubenswrapper[4911]: I0606 10:23:02.547490 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzfn6\" (UniqueName: \"kubernetes.io/projected/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-kube-api-access-gzfn6\") pod \"crc-debug-5rvrg\" (UID: \"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb\") " pod="openstack/crc-debug-5rvrg" Jun 06 10:23:02 crc kubenswrapper[4911]: I0606 10:23:02.547638 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-host\") pod \"crc-debug-5rvrg\" (UID: \"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb\") " pod="openstack/crc-debug-5rvrg" Jun 06 10:23:02 crc kubenswrapper[4911]: I0606 10:23:02.570360 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzfn6\" (UniqueName: \"kubernetes.io/projected/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-kube-api-access-gzfn6\") pod \"crc-debug-5rvrg\" (UID: \"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb\") " pod="openstack/crc-debug-5rvrg" Jun 06 10:23:02 crc kubenswrapper[4911]: I0606 10:23:02.700041 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-5rvrg" Jun 06 10:23:03 crc kubenswrapper[4911]: I0606 10:23:03.020739 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-5rvrg" event={"ID":"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb","Type":"ContainerStarted","Data":"aea351ca975e7c82ff59df05a6a5a050841045210f3e893b93eb444ba45eaebd"} Jun 06 10:23:04 crc kubenswrapper[4911]: I0606 10:23:04.033660 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-5rvrg" event={"ID":"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb","Type":"ContainerStarted","Data":"13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3"} Jun 06 10:23:04 crc kubenswrapper[4911]: I0606 10:23:04.049428 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-5rvrg" podStartSLOduration=2.049411195 podStartE2EDuration="2.049411195s" podCreationTimestamp="2025-06-06 10:23:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:23:04.047176368 +0000 UTC m=+4195.322601911" watchObservedRunningTime="2025-06-06 10:23:04.049411195 +0000 UTC m=+4195.324836738" Jun 06 10:23:13 crc kubenswrapper[4911]: I0606 10:23:13.483516 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-5rvrg"] Jun 06 10:23:13 crc kubenswrapper[4911]: I0606 10:23:13.484811 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-5rvrg" podUID="9278b0f7-d8a8-446f-a9b0-61b9e83d45eb" containerName="container-00" containerID="cri-o://13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3" gracePeriod=2 Jun 06 10:23:13 crc kubenswrapper[4911]: I0606 10:23:13.493290 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-5rvrg"] Jun 06 10:23:13 crc kubenswrapper[4911]: I0606 10:23:13.606060 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-5rvrg" Jun 06 10:23:13 crc kubenswrapper[4911]: I0606 10:23:13.668145 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzfn6\" (UniqueName: \"kubernetes.io/projected/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-kube-api-access-gzfn6\") pod \"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb\" (UID: \"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb\") " Jun 06 10:23:13 crc kubenswrapper[4911]: I0606 10:23:13.668193 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-host\") pod \"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb\" (UID: \"9278b0f7-d8a8-446f-a9b0-61b9e83d45eb\") " Jun 06 10:23:13 crc kubenswrapper[4911]: I0606 10:23:13.668379 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-host" (OuterVolumeSpecName: "host") pod "9278b0f7-d8a8-446f-a9b0-61b9e83d45eb" (UID: "9278b0f7-d8a8-446f-a9b0-61b9e83d45eb"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:23:13 crc kubenswrapper[4911]: I0606 10:23:13.668838 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:23:13 crc kubenswrapper[4911]: I0606 10:23:13.675378 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-kube-api-access-gzfn6" (OuterVolumeSpecName: "kube-api-access-gzfn6") pod "9278b0f7-d8a8-446f-a9b0-61b9e83d45eb" (UID: "9278b0f7-d8a8-446f-a9b0-61b9e83d45eb"). InnerVolumeSpecName "kube-api-access-gzfn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:23:13 crc kubenswrapper[4911]: I0606 10:23:13.771314 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzfn6\" (UniqueName: \"kubernetes.io/projected/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb-kube-api-access-gzfn6\") on node \"crc\" DevicePath \"\"" Jun 06 10:23:13 crc kubenswrapper[4911]: I0606 10:23:13.960848 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9278b0f7-d8a8-446f-a9b0-61b9e83d45eb" path="/var/lib/kubelet/pods/9278b0f7-d8a8-446f-a9b0-61b9e83d45eb/volumes" Jun 06 10:23:14 crc kubenswrapper[4911]: I0606 10:23:14.125591 4911 generic.go:334] "Generic (PLEG): container finished" podID="9278b0f7-d8a8-446f-a9b0-61b9e83d45eb" containerID="13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3" exitCode=0 Jun 06 10:23:14 crc kubenswrapper[4911]: I0606 10:23:14.125658 4911 scope.go:117] "RemoveContainer" containerID="13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3" Jun 06 10:23:14 crc kubenswrapper[4911]: I0606 10:23:14.125768 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-5rvrg" Jun 06 10:23:14 crc kubenswrapper[4911]: I0606 10:23:14.150919 4911 scope.go:117] "RemoveContainer" containerID="13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3" Jun 06 10:23:14 crc kubenswrapper[4911]: E0606 10:23:14.151432 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3\": container with ID starting with 13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3 not found: ID does not exist" containerID="13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3" Jun 06 10:23:14 crc kubenswrapper[4911]: I0606 10:23:14.151498 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3"} err="failed to get container status \"13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3\": rpc error: code = NotFound desc = could not find container \"13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3\": container with ID starting with 13914b3dc1938eb276b06cde7b06960a40be425391aa7c6aca0439ddda515cf3 not found: ID does not exist" Jun 06 10:23:54 crc kubenswrapper[4911]: I0606 10:23:54.300890 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:23:54 crc kubenswrapper[4911]: I0606 10:23:54.301471 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:24:01 crc kubenswrapper[4911]: I0606 10:24:01.905994 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-qbtk5"] Jun 06 10:24:01 crc kubenswrapper[4911]: E0606 10:24:01.906929 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9278b0f7-d8a8-446f-a9b0-61b9e83d45eb" containerName="container-00" Jun 06 10:24:01 crc kubenswrapper[4911]: I0606 10:24:01.906947 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9278b0f7-d8a8-446f-a9b0-61b9e83d45eb" containerName="container-00" Jun 06 10:24:01 crc kubenswrapper[4911]: I0606 10:24:01.907158 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9278b0f7-d8a8-446f-a9b0-61b9e83d45eb" containerName="container-00" Jun 06 10:24:01 crc kubenswrapper[4911]: I0606 10:24:01.907857 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-qbtk5" Jun 06 10:24:02 crc kubenswrapper[4911]: I0606 10:24:02.022781 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0c957992-7574-4b50-99af-344c1f3ab956-host\") pod \"crc-debug-qbtk5\" (UID: \"0c957992-7574-4b50-99af-344c1f3ab956\") " pod="openstack/crc-debug-qbtk5" Jun 06 10:24:02 crc kubenswrapper[4911]: I0606 10:24:02.022871 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87p5r\" (UniqueName: \"kubernetes.io/projected/0c957992-7574-4b50-99af-344c1f3ab956-kube-api-access-87p5r\") pod \"crc-debug-qbtk5\" (UID: \"0c957992-7574-4b50-99af-344c1f3ab956\") " pod="openstack/crc-debug-qbtk5" Jun 06 10:24:02 crc kubenswrapper[4911]: I0606 10:24:02.124690 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0c957992-7574-4b50-99af-344c1f3ab956-host\") pod \"crc-debug-qbtk5\" (UID: \"0c957992-7574-4b50-99af-344c1f3ab956\") " pod="openstack/crc-debug-qbtk5" Jun 06 10:24:02 crc kubenswrapper[4911]: I0606 10:24:02.124805 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87p5r\" (UniqueName: \"kubernetes.io/projected/0c957992-7574-4b50-99af-344c1f3ab956-kube-api-access-87p5r\") pod \"crc-debug-qbtk5\" (UID: \"0c957992-7574-4b50-99af-344c1f3ab956\") " pod="openstack/crc-debug-qbtk5" Jun 06 10:24:02 crc kubenswrapper[4911]: I0606 10:24:02.124875 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0c957992-7574-4b50-99af-344c1f3ab956-host\") pod \"crc-debug-qbtk5\" (UID: \"0c957992-7574-4b50-99af-344c1f3ab956\") " pod="openstack/crc-debug-qbtk5" Jun 06 10:24:02 crc kubenswrapper[4911]: I0606 10:24:02.637863 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87p5r\" (UniqueName: \"kubernetes.io/projected/0c957992-7574-4b50-99af-344c1f3ab956-kube-api-access-87p5r\") pod \"crc-debug-qbtk5\" (UID: \"0c957992-7574-4b50-99af-344c1f3ab956\") " pod="openstack/crc-debug-qbtk5" Jun 06 10:24:02 crc kubenswrapper[4911]: I0606 10:24:02.828564 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-qbtk5" Jun 06 10:24:03 crc kubenswrapper[4911]: I0606 10:24:03.568200 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-qbtk5" event={"ID":"0c957992-7574-4b50-99af-344c1f3ab956","Type":"ContainerStarted","Data":"95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c"} Jun 06 10:24:03 crc kubenswrapper[4911]: I0606 10:24:03.568595 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-qbtk5" event={"ID":"0c957992-7574-4b50-99af-344c1f3ab956","Type":"ContainerStarted","Data":"f51b966b3765f0bb3f31f87b44774ee9d66dfbfb1e7ebf7e6dc03f603d4594d7"} Jun 06 10:24:04 crc kubenswrapper[4911]: I0606 10:24:04.590149 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-qbtk5" podStartSLOduration=3.590125118 podStartE2EDuration="3.590125118s" podCreationTimestamp="2025-06-06 10:24:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:24:04.589181873 +0000 UTC m=+4255.864607416" watchObservedRunningTime="2025-06-06 10:24:04.590125118 +0000 UTC m=+4255.865550671" Jun 06 10:24:13 crc kubenswrapper[4911]: I0606 10:24:13.743155 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-qbtk5"] Jun 06 10:24:13 crc kubenswrapper[4911]: I0606 10:24:13.743949 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-qbtk5" podUID="0c957992-7574-4b50-99af-344c1f3ab956" containerName="container-00" containerID="cri-o://95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c" gracePeriod=2 Jun 06 10:24:13 crc kubenswrapper[4911]: I0606 10:24:13.754023 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-qbtk5"] Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.030579 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-qbtk5" Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.131006 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87p5r\" (UniqueName: \"kubernetes.io/projected/0c957992-7574-4b50-99af-344c1f3ab956-kube-api-access-87p5r\") pod \"0c957992-7574-4b50-99af-344c1f3ab956\" (UID: \"0c957992-7574-4b50-99af-344c1f3ab956\") " Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.131427 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0c957992-7574-4b50-99af-344c1f3ab956-host\") pod \"0c957992-7574-4b50-99af-344c1f3ab956\" (UID: \"0c957992-7574-4b50-99af-344c1f3ab956\") " Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.131600 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c957992-7574-4b50-99af-344c1f3ab956-host" (OuterVolumeSpecName: "host") pod "0c957992-7574-4b50-99af-344c1f3ab956" (UID: "0c957992-7574-4b50-99af-344c1f3ab956"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.131990 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0c957992-7574-4b50-99af-344c1f3ab956-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.137691 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c957992-7574-4b50-99af-344c1f3ab956-kube-api-access-87p5r" (OuterVolumeSpecName: "kube-api-access-87p5r") pod "0c957992-7574-4b50-99af-344c1f3ab956" (UID: "0c957992-7574-4b50-99af-344c1f3ab956"). InnerVolumeSpecName "kube-api-access-87p5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.234981 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87p5r\" (UniqueName: \"kubernetes.io/projected/0c957992-7574-4b50-99af-344c1f3ab956-kube-api-access-87p5r\") on node \"crc\" DevicePath \"\"" Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.682413 4911 generic.go:334] "Generic (PLEG): container finished" podID="0c957992-7574-4b50-99af-344c1f3ab956" containerID="95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c" exitCode=0 Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.682477 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-qbtk5" Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.682504 4911 scope.go:117] "RemoveContainer" containerID="95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c" Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.716779 4911 scope.go:117] "RemoveContainer" containerID="95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c" Jun 06 10:24:14 crc kubenswrapper[4911]: E0606 10:24:14.717374 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c\": container with ID starting with 95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c not found: ID does not exist" containerID="95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c" Jun 06 10:24:14 crc kubenswrapper[4911]: I0606 10:24:14.717423 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c"} err="failed to get container status \"95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c\": rpc error: code = NotFound desc = could not find container \"95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c\": container with ID starting with 95a6e1de94d0d736ae9a882decd78c776ef938dbdc239b7227de8121bf01435c not found: ID does not exist" Jun 06 10:24:15 crc kubenswrapper[4911]: I0606 10:24:15.958014 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c957992-7574-4b50-99af-344c1f3ab956" path="/var/lib/kubelet/pods/0c957992-7574-4b50-99af-344c1f3ab956/volumes" Jun 06 10:24:24 crc kubenswrapper[4911]: I0606 10:24:24.299953 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:24:24 crc kubenswrapper[4911]: I0606 
10:24:24.300547 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:24:54 crc kubenswrapper[4911]: I0606 10:24:54.300484 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:24:54 crc kubenswrapper[4911]: I0606 10:24:54.301053 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:24:54 crc kubenswrapper[4911]: I0606 10:24:54.301142 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 10:24:54 crc kubenswrapper[4911]: I0606 10:24:54.302218 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 10:24:54 crc kubenswrapper[4911]: I0606 10:24:54.302281 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" gracePeriod=600 Jun 06 10:24:54 crc kubenswrapper[4911]: E0606 10:24:54.447797 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:24:55 crc kubenswrapper[4911]: I0606 10:24:55.055009 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" exitCode=0 Jun 06 10:24:55 crc kubenswrapper[4911]: I0606 10:24:55.055083 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112"} Jun 06 10:24:55 crc kubenswrapper[4911]: I0606 10:24:55.055362 4911 scope.go:117] "RemoveContainer" containerID="2ffdccc55218e0a3d0931171f34099cd1d7e39076107fabce4a8a019d0583152" Jun 06 10:24:55 crc kubenswrapper[4911]: I0606 10:24:55.056055 4911 scope.go:117] "RemoveContainer" 
containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:24:55 crc kubenswrapper[4911]: E0606 10:24:55.056410 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:25:02 crc kubenswrapper[4911]: I0606 10:25:02.148487 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-8g7bl"] Jun 06 10:25:02 crc kubenswrapper[4911]: E0606 10:25:02.149394 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c957992-7574-4b50-99af-344c1f3ab956" containerName="container-00" Jun 06 10:25:02 crc kubenswrapper[4911]: I0606 10:25:02.149407 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c957992-7574-4b50-99af-344c1f3ab956" containerName="container-00" Jun 06 10:25:02 crc kubenswrapper[4911]: I0606 10:25:02.149587 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c957992-7574-4b50-99af-344c1f3ab956" containerName="container-00" Jun 06 10:25:02 crc kubenswrapper[4911]: I0606 10:25:02.150229 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-8g7bl" Jun 06 10:25:02 crc kubenswrapper[4911]: I0606 10:25:02.158631 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5245669a-fce5-4a1f-9d08-40b5678e5c00-host\") pod \"crc-debug-8g7bl\" (UID: \"5245669a-fce5-4a1f-9d08-40b5678e5c00\") " pod="openstack/crc-debug-8g7bl" Jun 06 10:25:02 crc kubenswrapper[4911]: I0606 10:25:02.158702 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh7zv\" (UniqueName: \"kubernetes.io/projected/5245669a-fce5-4a1f-9d08-40b5678e5c00-kube-api-access-nh7zv\") pod \"crc-debug-8g7bl\" (UID: \"5245669a-fce5-4a1f-9d08-40b5678e5c00\") " pod="openstack/crc-debug-8g7bl" Jun 06 10:25:02 crc kubenswrapper[4911]: I0606 10:25:02.261114 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5245669a-fce5-4a1f-9d08-40b5678e5c00-host\") pod \"crc-debug-8g7bl\" (UID: \"5245669a-fce5-4a1f-9d08-40b5678e5c00\") " pod="openstack/crc-debug-8g7bl" Jun 06 10:25:02 crc kubenswrapper[4911]: I0606 10:25:02.261190 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh7zv\" (UniqueName: \"kubernetes.io/projected/5245669a-fce5-4a1f-9d08-40b5678e5c00-kube-api-access-nh7zv\") pod \"crc-debug-8g7bl\" (UID: \"5245669a-fce5-4a1f-9d08-40b5678e5c00\") " pod="openstack/crc-debug-8g7bl" Jun 06 10:25:02 crc kubenswrapper[4911]: I0606 10:25:02.261278 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5245669a-fce5-4a1f-9d08-40b5678e5c00-host\") pod \"crc-debug-8g7bl\" (UID: \"5245669a-fce5-4a1f-9d08-40b5678e5c00\") " pod="openstack/crc-debug-8g7bl" Jun 06 10:25:02 crc kubenswrapper[4911]: I0606 10:25:02.278614 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh7zv\" (UniqueName: 
\"kubernetes.io/projected/5245669a-fce5-4a1f-9d08-40b5678e5c00-kube-api-access-nh7zv\") pod \"crc-debug-8g7bl\" (UID: \"5245669a-fce5-4a1f-9d08-40b5678e5c00\") " pod="openstack/crc-debug-8g7bl" Jun 06 10:25:02 crc kubenswrapper[4911]: I0606 10:25:02.471297 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-8g7bl" Jun 06 10:25:03 crc kubenswrapper[4911]: I0606 10:25:03.122806 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-8g7bl" event={"ID":"5245669a-fce5-4a1f-9d08-40b5678e5c00","Type":"ContainerStarted","Data":"51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc"} Jun 06 10:25:03 crc kubenswrapper[4911]: I0606 10:25:03.123120 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-8g7bl" event={"ID":"5245669a-fce5-4a1f-9d08-40b5678e5c00","Type":"ContainerStarted","Data":"2a8749b109ca3919b7a80d63321eb8ad521655fbe20860378bc1f8ff64347167"} Jun 06 10:25:03 crc kubenswrapper[4911]: I0606 10:25:03.140640 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-8g7bl" podStartSLOduration=1.140622741 podStartE2EDuration="1.140622741s" podCreationTimestamp="2025-06-06 10:25:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:25:03.134888544 +0000 UTC m=+4314.410314097" watchObservedRunningTime="2025-06-06 10:25:03.140622741 +0000 UTC m=+4314.416048284" Jun 06 10:25:07 crc kubenswrapper[4911]: I0606 10:25:07.948897 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:25:07 crc kubenswrapper[4911]: E0606 10:25:07.949761 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:25:13 crc kubenswrapper[4911]: I0606 10:25:13.353738 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-8g7bl"] Jun 06 10:25:13 crc kubenswrapper[4911]: I0606 10:25:13.354857 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-8g7bl" podUID="5245669a-fce5-4a1f-9d08-40b5678e5c00" containerName="container-00" containerID="cri-o://51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc" gracePeriod=2 Jun 06 10:25:13 crc kubenswrapper[4911]: I0606 10:25:13.365692 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-8g7bl"] Jun 06 10:25:13 crc kubenswrapper[4911]: I0606 10:25:13.512978 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-8g7bl" Jun 06 10:25:13 crc kubenswrapper[4911]: I0606 10:25:13.624404 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nh7zv\" (UniqueName: \"kubernetes.io/projected/5245669a-fce5-4a1f-9d08-40b5678e5c00-kube-api-access-nh7zv\") pod \"5245669a-fce5-4a1f-9d08-40b5678e5c00\" (UID: \"5245669a-fce5-4a1f-9d08-40b5678e5c00\") " Jun 06 10:25:13 crc kubenswrapper[4911]: I0606 10:25:13.624799 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5245669a-fce5-4a1f-9d08-40b5678e5c00-host\") pod \"5245669a-fce5-4a1f-9d08-40b5678e5c00\" (UID: \"5245669a-fce5-4a1f-9d08-40b5678e5c00\") " Jun 06 10:25:13 crc kubenswrapper[4911]: I0606 10:25:13.625193 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5245669a-fce5-4a1f-9d08-40b5678e5c00-host" (OuterVolumeSpecName: "host") pod "5245669a-fce5-4a1f-9d08-40b5678e5c00" (UID: "5245669a-fce5-4a1f-9d08-40b5678e5c00"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:25:13 crc kubenswrapper[4911]: I0606 10:25:13.626302 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5245669a-fce5-4a1f-9d08-40b5678e5c00-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:25:13 crc kubenswrapper[4911]: I0606 10:25:13.633854 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5245669a-fce5-4a1f-9d08-40b5678e5c00-kube-api-access-nh7zv" (OuterVolumeSpecName: "kube-api-access-nh7zv") pod "5245669a-fce5-4a1f-9d08-40b5678e5c00" (UID: "5245669a-fce5-4a1f-9d08-40b5678e5c00"). InnerVolumeSpecName "kube-api-access-nh7zv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:25:13 crc kubenswrapper[4911]: I0606 10:25:13.728904 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nh7zv\" (UniqueName: \"kubernetes.io/projected/5245669a-fce5-4a1f-9d08-40b5678e5c00-kube-api-access-nh7zv\") on node \"crc\" DevicePath \"\"" Jun 06 10:25:13 crc kubenswrapper[4911]: I0606 10:25:13.958897 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5245669a-fce5-4a1f-9d08-40b5678e5c00" path="/var/lib/kubelet/pods/5245669a-fce5-4a1f-9d08-40b5678e5c00/volumes" Jun 06 10:25:14 crc kubenswrapper[4911]: I0606 10:25:14.265147 4911 generic.go:334] "Generic (PLEG): container finished" podID="5245669a-fce5-4a1f-9d08-40b5678e5c00" containerID="51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc" exitCode=0 Jun 06 10:25:14 crc kubenswrapper[4911]: I0606 10:25:14.265245 4911 scope.go:117] "RemoveContainer" containerID="51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc" Jun 06 10:25:14 crc kubenswrapper[4911]: I0606 10:25:14.265474 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-8g7bl" Jun 06 10:25:14 crc kubenswrapper[4911]: I0606 10:25:14.290924 4911 scope.go:117] "RemoveContainer" containerID="51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc" Jun 06 10:25:14 crc kubenswrapper[4911]: E0606 10:25:14.291324 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc\": container with ID starting with 51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc not found: ID does not exist" containerID="51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc" Jun 06 10:25:14 crc kubenswrapper[4911]: I0606 10:25:14.291379 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc"} err="failed to get container status \"51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc\": rpc error: code = NotFound desc = could not find container \"51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc\": container with ID starting with 51d256076f4436576723c1b30978c5cce68f93611c511eaa0020613cf2a930dc not found: ID does not exist" Jun 06 10:25:18 crc kubenswrapper[4911]: I0606 10:25:18.947976 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:25:18 crc kubenswrapper[4911]: E0606 10:25:18.948834 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:25:30 crc kubenswrapper[4911]: I0606 10:25:30.948622 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:25:30 crc kubenswrapper[4911]: E0606 10:25:30.950012 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:25:44 crc kubenswrapper[4911]: I0606 10:25:44.949053 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:25:44 crc kubenswrapper[4911]: E0606 10:25:44.950285 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:25:57 crc kubenswrapper[4911]: I0606 10:25:57.948039 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:25:57 crc kubenswrapper[4911]: E0606 10:25:57.948771 
4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:26:01 crc kubenswrapper[4911]: I0606 10:26:01.806477 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-tnpfg"] Jun 06 10:26:01 crc kubenswrapper[4911]: E0606 10:26:01.808053 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5245669a-fce5-4a1f-9d08-40b5678e5c00" containerName="container-00" Jun 06 10:26:01 crc kubenswrapper[4911]: I0606 10:26:01.808073 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5245669a-fce5-4a1f-9d08-40b5678e5c00" containerName="container-00" Jun 06 10:26:01 crc kubenswrapper[4911]: I0606 10:26:01.808338 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5245669a-fce5-4a1f-9d08-40b5678e5c00" containerName="container-00" Jun 06 10:26:01 crc kubenswrapper[4911]: I0606 10:26:01.809377 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-tnpfg" Jun 06 10:26:01 crc kubenswrapper[4911]: I0606 10:26:01.839424 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9fdp\" (UniqueName: \"kubernetes.io/projected/0acb1cec-007e-4740-b9e8-95421a1b0231-kube-api-access-p9fdp\") pod \"crc-debug-tnpfg\" (UID: \"0acb1cec-007e-4740-b9e8-95421a1b0231\") " pod="openstack/crc-debug-tnpfg" Jun 06 10:26:01 crc kubenswrapper[4911]: I0606 10:26:01.839612 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0acb1cec-007e-4740-b9e8-95421a1b0231-host\") pod \"crc-debug-tnpfg\" (UID: \"0acb1cec-007e-4740-b9e8-95421a1b0231\") " pod="openstack/crc-debug-tnpfg" Jun 06 10:26:01 crc kubenswrapper[4911]: I0606 10:26:01.941768 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9fdp\" (UniqueName: \"kubernetes.io/projected/0acb1cec-007e-4740-b9e8-95421a1b0231-kube-api-access-p9fdp\") pod \"crc-debug-tnpfg\" (UID: \"0acb1cec-007e-4740-b9e8-95421a1b0231\") " pod="openstack/crc-debug-tnpfg" Jun 06 10:26:01 crc kubenswrapper[4911]: I0606 10:26:01.941942 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0acb1cec-007e-4740-b9e8-95421a1b0231-host\") pod \"crc-debug-tnpfg\" (UID: \"0acb1cec-007e-4740-b9e8-95421a1b0231\") " pod="openstack/crc-debug-tnpfg" Jun 06 10:26:01 crc kubenswrapper[4911]: I0606 10:26:01.942131 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0acb1cec-007e-4740-b9e8-95421a1b0231-host\") pod \"crc-debug-tnpfg\" (UID: \"0acb1cec-007e-4740-b9e8-95421a1b0231\") " pod="openstack/crc-debug-tnpfg" Jun 06 10:26:01 crc kubenswrapper[4911]: I0606 10:26:01.967587 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9fdp\" (UniqueName: \"kubernetes.io/projected/0acb1cec-007e-4740-b9e8-95421a1b0231-kube-api-access-p9fdp\") pod \"crc-debug-tnpfg\" (UID: \"0acb1cec-007e-4740-b9e8-95421a1b0231\") " pod="openstack/crc-debug-tnpfg" Jun 06 10:26:02 crc 
kubenswrapper[4911]: I0606 10:26:02.131989 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-tnpfg" Jun 06 10:26:02 crc kubenswrapper[4911]: I0606 10:26:02.809515 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-tnpfg" event={"ID":"0acb1cec-007e-4740-b9e8-95421a1b0231","Type":"ContainerStarted","Data":"ee761082244c084cb4e1846c3001f07602b8bf1ff2d63653bf2e1111a08683b1"} Jun 06 10:26:02 crc kubenswrapper[4911]: I0606 10:26:02.809965 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-tnpfg" event={"ID":"0acb1cec-007e-4740-b9e8-95421a1b0231","Type":"ContainerStarted","Data":"4fc3d77cc03185b7c41da133e410f65104994a69144b52cb3808c788bfe9b977"} Jun 06 10:26:02 crc kubenswrapper[4911]: I0606 10:26:02.829866 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-tnpfg" podStartSLOduration=1.8298363800000002 podStartE2EDuration="1.82983638s" podCreationTimestamp="2025-06-06 10:26:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:26:02.823984199 +0000 UTC m=+4374.099409762" watchObservedRunningTime="2025-06-06 10:26:02.82983638 +0000 UTC m=+4374.105261953" Jun 06 10:26:08 crc kubenswrapper[4911]: I0606 10:26:08.948831 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:26:08 crc kubenswrapper[4911]: E0606 10:26:08.949807 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:26:12 crc kubenswrapper[4911]: I0606 10:26:12.844886 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-tnpfg"] Jun 06 10:26:12 crc kubenswrapper[4911]: I0606 10:26:12.845859 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-tnpfg" podUID="0acb1cec-007e-4740-b9e8-95421a1b0231" containerName="container-00" containerID="cri-o://ee761082244c084cb4e1846c3001f07602b8bf1ff2d63653bf2e1111a08683b1" gracePeriod=2 Jun 06 10:26:12 crc kubenswrapper[4911]: I0606 10:26:12.856381 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-tnpfg"] Jun 06 10:26:12 crc kubenswrapper[4911]: I0606 10:26:12.931912 4911 generic.go:334] "Generic (PLEG): container finished" podID="0acb1cec-007e-4740-b9e8-95421a1b0231" containerID="ee761082244c084cb4e1846c3001f07602b8bf1ff2d63653bf2e1111a08683b1" exitCode=0 Jun 06 10:26:12 crc kubenswrapper[4911]: I0606 10:26:12.932013 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4fc3d77cc03185b7c41da133e410f65104994a69144b52cb3808c788bfe9b977" Jun 06 10:26:12 crc kubenswrapper[4911]: I0606 10:26:12.940605 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-tnpfg" Jun 06 10:26:13 crc kubenswrapper[4911]: I0606 10:26:13.039368 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0acb1cec-007e-4740-b9e8-95421a1b0231-host\") pod \"0acb1cec-007e-4740-b9e8-95421a1b0231\" (UID: \"0acb1cec-007e-4740-b9e8-95421a1b0231\") " Jun 06 10:26:13 crc kubenswrapper[4911]: I0606 10:26:13.039503 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0acb1cec-007e-4740-b9e8-95421a1b0231-host" (OuterVolumeSpecName: "host") pod "0acb1cec-007e-4740-b9e8-95421a1b0231" (UID: "0acb1cec-007e-4740-b9e8-95421a1b0231"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:26:13 crc kubenswrapper[4911]: I0606 10:26:13.039578 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p9fdp\" (UniqueName: \"kubernetes.io/projected/0acb1cec-007e-4740-b9e8-95421a1b0231-kube-api-access-p9fdp\") pod \"0acb1cec-007e-4740-b9e8-95421a1b0231\" (UID: \"0acb1cec-007e-4740-b9e8-95421a1b0231\") " Jun 06 10:26:13 crc kubenswrapper[4911]: I0606 10:26:13.040038 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0acb1cec-007e-4740-b9e8-95421a1b0231-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:26:13 crc kubenswrapper[4911]: I0606 10:26:13.047965 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0acb1cec-007e-4740-b9e8-95421a1b0231-kube-api-access-p9fdp" (OuterVolumeSpecName: "kube-api-access-p9fdp") pod "0acb1cec-007e-4740-b9e8-95421a1b0231" (UID: "0acb1cec-007e-4740-b9e8-95421a1b0231"). InnerVolumeSpecName "kube-api-access-p9fdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:26:13 crc kubenswrapper[4911]: I0606 10:26:13.142473 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p9fdp\" (UniqueName: \"kubernetes.io/projected/0acb1cec-007e-4740-b9e8-95421a1b0231-kube-api-access-p9fdp\") on node \"crc\" DevicePath \"\"" Jun 06 10:26:13 crc kubenswrapper[4911]: I0606 10:26:13.941898 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-tnpfg" Jun 06 10:26:13 crc kubenswrapper[4911]: I0606 10:26:13.963817 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0acb1cec-007e-4740-b9e8-95421a1b0231" path="/var/lib/kubelet/pods/0acb1cec-007e-4740-b9e8-95421a1b0231/volumes" Jun 06 10:26:20 crc kubenswrapper[4911]: I0606 10:26:20.948183 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:26:20 crc kubenswrapper[4911]: E0606 10:26:20.949292 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.251457 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5xzhm"] Jun 06 10:26:26 crc kubenswrapper[4911]: E0606 10:26:26.253268 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0acb1cec-007e-4740-b9e8-95421a1b0231" containerName="container-00" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.253313 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0acb1cec-007e-4740-b9e8-95421a1b0231" containerName="container-00" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.253649 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0acb1cec-007e-4740-b9e8-95421a1b0231" containerName="container-00" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.256633 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.264898 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5xzhm"] Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.314536 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk99x\" (UniqueName: \"kubernetes.io/projected/e926d6d4-5a7c-4869-bc11-1d736e4e614b-kube-api-access-pk99x\") pod \"community-operators-5xzhm\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.315014 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-utilities\") pod \"community-operators-5xzhm\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.315139 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-catalog-content\") pod \"community-operators-5xzhm\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.417508 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-catalog-content\") pod \"community-operators-5xzhm\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.417643 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk99x\" (UniqueName: \"kubernetes.io/projected/e926d6d4-5a7c-4869-bc11-1d736e4e614b-kube-api-access-pk99x\") pod \"community-operators-5xzhm\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.417774 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-utilities\") pod \"community-operators-5xzhm\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.418059 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-catalog-content\") pod \"community-operators-5xzhm\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.418292 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-utilities\") pod \"community-operators-5xzhm\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.448312 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pk99x\" (UniqueName: \"kubernetes.io/projected/e926d6d4-5a7c-4869-bc11-1d736e4e614b-kube-api-access-pk99x\") pod \"community-operators-5xzhm\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:26 crc kubenswrapper[4911]: I0606 10:26:26.587704 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:27 crc kubenswrapper[4911]: I0606 10:26:27.378947 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5xzhm"] Jun 06 10:26:28 crc kubenswrapper[4911]: I0606 10:26:28.108021 4911 generic.go:334] "Generic (PLEG): container finished" podID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" containerID="f450a2dacbceb6c99d088495ac689d256c00e534c771ad44102b685054b15b1b" exitCode=0 Jun 06 10:26:28 crc kubenswrapper[4911]: I0606 10:26:28.108671 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xzhm" event={"ID":"e926d6d4-5a7c-4869-bc11-1d736e4e614b","Type":"ContainerDied","Data":"f450a2dacbceb6c99d088495ac689d256c00e534c771ad44102b685054b15b1b"} Jun 06 10:26:28 crc kubenswrapper[4911]: I0606 10:26:28.108724 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xzhm" event={"ID":"e926d6d4-5a7c-4869-bc11-1d736e4e614b","Type":"ContainerStarted","Data":"62f37315e43e3659756d6a59d91865aaf1f5dd97fefe15b8990c961c3f99d6a6"} Jun 06 10:26:28 crc kubenswrapper[4911]: I0606 10:26:28.112211 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 10:26:29 crc kubenswrapper[4911]: I0606 10:26:29.123110 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xzhm" event={"ID":"e926d6d4-5a7c-4869-bc11-1d736e4e614b","Type":"ContainerStarted","Data":"5e304e548b1fe05b6b41f9d01bb340ab5ecde981a591a53cc548a0f1e6d78259"} Jun 06 10:26:30 crc kubenswrapper[4911]: I0606 10:26:30.139278 4911 generic.go:334] "Generic (PLEG): container finished" podID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" containerID="5e304e548b1fe05b6b41f9d01bb340ab5ecde981a591a53cc548a0f1e6d78259" exitCode=0 Jun 06 10:26:30 crc kubenswrapper[4911]: I0606 10:26:30.139334 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xzhm" event={"ID":"e926d6d4-5a7c-4869-bc11-1d736e4e614b","Type":"ContainerDied","Data":"5e304e548b1fe05b6b41f9d01bb340ab5ecde981a591a53cc548a0f1e6d78259"} Jun 06 10:26:31 crc kubenswrapper[4911]: I0606 10:26:31.949279 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:26:31 crc kubenswrapper[4911]: E0606 10:26:31.950154 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:26:32 crc kubenswrapper[4911]: I0606 10:26:32.165707 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xzhm" 
event={"ID":"e926d6d4-5a7c-4869-bc11-1d736e4e614b","Type":"ContainerStarted","Data":"7d95a2296e1dc2e8bd935fbff8af70f172c0e1b968e7616761c67c07d2666a26"} Jun 06 10:26:32 crc kubenswrapper[4911]: I0606 10:26:32.188352 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5xzhm" podStartSLOduration=2.896502658 podStartE2EDuration="6.188316502s" podCreationTimestamp="2025-06-06 10:26:26 +0000 UTC" firstStartedPulling="2025-06-06 10:26:28.111941476 +0000 UTC m=+4399.387367019" lastFinishedPulling="2025-06-06 10:26:31.40375532 +0000 UTC m=+4402.679180863" observedRunningTime="2025-06-06 10:26:32.186979408 +0000 UTC m=+4403.462404991" watchObservedRunningTime="2025-06-06 10:26:32.188316502 +0000 UTC m=+4403.463742045" Jun 06 10:26:36 crc kubenswrapper[4911]: I0606 10:26:36.587953 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:36 crc kubenswrapper[4911]: I0606 10:26:36.588726 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:36 crc kubenswrapper[4911]: I0606 10:26:36.642871 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:37 crc kubenswrapper[4911]: I0606 10:26:37.881225 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:37 crc kubenswrapper[4911]: I0606 10:26:37.942523 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5xzhm"] Jun 06 10:26:39 crc kubenswrapper[4911]: I0606 10:26:39.246708 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5xzhm" podUID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" containerName="registry-server" containerID="cri-o://7d95a2296e1dc2e8bd935fbff8af70f172c0e1b968e7616761c67c07d2666a26" gracePeriod=2 Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.261686 4911 generic.go:334] "Generic (PLEG): container finished" podID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" containerID="7d95a2296e1dc2e8bd935fbff8af70f172c0e1b968e7616761c67c07d2666a26" exitCode=0 Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.262529 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xzhm" event={"ID":"e926d6d4-5a7c-4869-bc11-1d736e4e614b","Type":"ContainerDied","Data":"7d95a2296e1dc2e8bd935fbff8af70f172c0e1b968e7616761c67c07d2666a26"} Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.608284 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.679690 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-catalog-content\") pod \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.679769 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-utilities\") pod \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.680180 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pk99x\" (UniqueName: \"kubernetes.io/projected/e926d6d4-5a7c-4869-bc11-1d736e4e614b-kube-api-access-pk99x\") pod \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\" (UID: \"e926d6d4-5a7c-4869-bc11-1d736e4e614b\") " Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.684182 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-utilities" (OuterVolumeSpecName: "utilities") pod "e926d6d4-5a7c-4869-bc11-1d736e4e614b" (UID: "e926d6d4-5a7c-4869-bc11-1d736e4e614b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.689681 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e926d6d4-5a7c-4869-bc11-1d736e4e614b-kube-api-access-pk99x" (OuterVolumeSpecName: "kube-api-access-pk99x") pod "e926d6d4-5a7c-4869-bc11-1d736e4e614b" (UID: "e926d6d4-5a7c-4869-bc11-1d736e4e614b"). InnerVolumeSpecName "kube-api-access-pk99x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.748881 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e926d6d4-5a7c-4869-bc11-1d736e4e614b" (UID: "e926d6d4-5a7c-4869-bc11-1d736e4e614b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.783058 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.783143 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e926d6d4-5a7c-4869-bc11-1d736e4e614b-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:26:40 crc kubenswrapper[4911]: I0606 10:26:40.783155 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pk99x\" (UniqueName: \"kubernetes.io/projected/e926d6d4-5a7c-4869-bc11-1d736e4e614b-kube-api-access-pk99x\") on node \"crc\" DevicePath \"\"" Jun 06 10:26:41 crc kubenswrapper[4911]: I0606 10:26:41.275336 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5xzhm" event={"ID":"e926d6d4-5a7c-4869-bc11-1d736e4e614b","Type":"ContainerDied","Data":"62f37315e43e3659756d6a59d91865aaf1f5dd97fefe15b8990c961c3f99d6a6"} Jun 06 10:26:41 crc kubenswrapper[4911]: I0606 10:26:41.275385 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5xzhm" Jun 06 10:26:41 crc kubenswrapper[4911]: I0606 10:26:41.275902 4911 scope.go:117] "RemoveContainer" containerID="7d95a2296e1dc2e8bd935fbff8af70f172c0e1b968e7616761c67c07d2666a26" Jun 06 10:26:41 crc kubenswrapper[4911]: I0606 10:26:41.303846 4911 scope.go:117] "RemoveContainer" containerID="5e304e548b1fe05b6b41f9d01bb340ab5ecde981a591a53cc548a0f1e6d78259" Jun 06 10:26:41 crc kubenswrapper[4911]: I0606 10:26:41.335307 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5xzhm"] Jun 06 10:26:41 crc kubenswrapper[4911]: I0606 10:26:41.348967 4911 scope.go:117] "RemoveContainer" containerID="f450a2dacbceb6c99d088495ac689d256c00e534c771ad44102b685054b15b1b" Jun 06 10:26:41 crc kubenswrapper[4911]: I0606 10:26:41.364835 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5xzhm"] Jun 06 10:26:41 crc kubenswrapper[4911]: I0606 10:26:41.963544 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" path="/var/lib/kubelet/pods/e926d6d4-5a7c-4869-bc11-1d736e4e614b/volumes" Jun 06 10:26:45 crc kubenswrapper[4911]: I0606 10:26:45.958527 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:26:45 crc kubenswrapper[4911]: E0606 10:26:45.961131 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:27:00 crc kubenswrapper[4911]: I0606 10:27:00.949745 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:27:00 crc kubenswrapper[4911]: E0606 10:27:00.951450 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.245526 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-2g5s7"] Jun 06 10:27:02 crc kubenswrapper[4911]: E0606 10:27:02.246128 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" containerName="extract-utilities" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.246149 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" containerName="extract-utilities" Jun 06 10:27:02 crc kubenswrapper[4911]: E0606 10:27:02.246188 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" containerName="registry-server" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.246197 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" containerName="registry-server" Jun 06 10:27:02 crc kubenswrapper[4911]: E0606 10:27:02.246229 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" containerName="extract-content" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.246239 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" containerName="extract-content" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.246513 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e926d6d4-5a7c-4869-bc11-1d736e4e614b" containerName="registry-server" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.247538 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2g5s7" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.361282 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/df733ed9-0740-4a42-8f4c-bbc66abafead-host\") pod \"crc-debug-2g5s7\" (UID: \"df733ed9-0740-4a42-8f4c-bbc66abafead\") " pod="openstack/crc-debug-2g5s7" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.361865 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8bsf\" (UniqueName: \"kubernetes.io/projected/df733ed9-0740-4a42-8f4c-bbc66abafead-kube-api-access-h8bsf\") pod \"crc-debug-2g5s7\" (UID: \"df733ed9-0740-4a42-8f4c-bbc66abafead\") " pod="openstack/crc-debug-2g5s7" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.464392 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/df733ed9-0740-4a42-8f4c-bbc66abafead-host\") pod \"crc-debug-2g5s7\" (UID: \"df733ed9-0740-4a42-8f4c-bbc66abafead\") " pod="openstack/crc-debug-2g5s7" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.464562 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8bsf\" (UniqueName: \"kubernetes.io/projected/df733ed9-0740-4a42-8f4c-bbc66abafead-kube-api-access-h8bsf\") pod \"crc-debug-2g5s7\" (UID: \"df733ed9-0740-4a42-8f4c-bbc66abafead\") " pod="openstack/crc-debug-2g5s7" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.464568 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/df733ed9-0740-4a42-8f4c-bbc66abafead-host\") pod \"crc-debug-2g5s7\" (UID: \"df733ed9-0740-4a42-8f4c-bbc66abafead\") " pod="openstack/crc-debug-2g5s7" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.504827 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8bsf\" (UniqueName: \"kubernetes.io/projected/df733ed9-0740-4a42-8f4c-bbc66abafead-kube-api-access-h8bsf\") pod \"crc-debug-2g5s7\" (UID: \"df733ed9-0740-4a42-8f4c-bbc66abafead\") " pod="openstack/crc-debug-2g5s7" Jun 06 10:27:02 crc kubenswrapper[4911]: I0606 10:27:02.569994 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2g5s7" Jun 06 10:27:03 crc kubenswrapper[4911]: I0606 10:27:03.547296 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-2g5s7" event={"ID":"df733ed9-0740-4a42-8f4c-bbc66abafead","Type":"ContainerStarted","Data":"e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879"} Jun 06 10:27:03 crc kubenswrapper[4911]: I0606 10:27:03.549407 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-2g5s7" event={"ID":"df733ed9-0740-4a42-8f4c-bbc66abafead","Type":"ContainerStarted","Data":"b9d643dc3d43235da6fae29cadc62113a0b4808b1f0cf606d0036d6b7d7577e4"} Jun 06 10:27:03 crc kubenswrapper[4911]: I0606 10:27:03.568198 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-2g5s7" podStartSLOduration=1.5681643140000001 podStartE2EDuration="1.568164314s" podCreationTimestamp="2025-06-06 10:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:27:03.564793828 +0000 UTC m=+4434.840219381" watchObservedRunningTime="2025-06-06 10:27:03.568164314 +0000 UTC m=+4434.843589867" Jun 06 10:27:13 crc kubenswrapper[4911]: I0606 10:27:13.948040 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:27:13 crc kubenswrapper[4911]: E0606 10:27:13.949451 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.150031 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-2g5s7"] Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.150447 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-2g5s7" podUID="df733ed9-0740-4a42-8f4c-bbc66abafead" containerName="container-00" containerID="cri-o://e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879" gracePeriod=2 Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.170007 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-2g5s7"] Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.514440 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-2g5s7" Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.560954 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/df733ed9-0740-4a42-8f4c-bbc66abafead-host\") pod \"df733ed9-0740-4a42-8f4c-bbc66abafead\" (UID: \"df733ed9-0740-4a42-8f4c-bbc66abafead\") " Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.561134 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/df733ed9-0740-4a42-8f4c-bbc66abafead-host" (OuterVolumeSpecName: "host") pod "df733ed9-0740-4a42-8f4c-bbc66abafead" (UID: "df733ed9-0740-4a42-8f4c-bbc66abafead"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.561540 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8bsf\" (UniqueName: \"kubernetes.io/projected/df733ed9-0740-4a42-8f4c-bbc66abafead-kube-api-access-h8bsf\") pod \"df733ed9-0740-4a42-8f4c-bbc66abafead\" (UID: \"df733ed9-0740-4a42-8f4c-bbc66abafead\") " Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.562740 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/df733ed9-0740-4a42-8f4c-bbc66abafead-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.566731 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df733ed9-0740-4a42-8f4c-bbc66abafead-kube-api-access-h8bsf" (OuterVolumeSpecName: "kube-api-access-h8bsf") pod "df733ed9-0740-4a42-8f4c-bbc66abafead" (UID: "df733ed9-0740-4a42-8f4c-bbc66abafead"). InnerVolumeSpecName "kube-api-access-h8bsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.664857 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8bsf\" (UniqueName: \"kubernetes.io/projected/df733ed9-0740-4a42-8f4c-bbc66abafead-kube-api-access-h8bsf\") on node \"crc\" DevicePath \"\"" Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.689565 4911 generic.go:334] "Generic (PLEG): container finished" podID="df733ed9-0740-4a42-8f4c-bbc66abafead" containerID="e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879" exitCode=0 Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.689627 4911 scope.go:117] "RemoveContainer" containerID="e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879" Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.689648 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2g5s7" Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.715417 4911 scope.go:117] "RemoveContainer" containerID="e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879" Jun 06 10:27:14 crc kubenswrapper[4911]: E0606 10:27:14.716386 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879\": container with ID starting with e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879 not found: ID does not exist" containerID="e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879" Jun 06 10:27:14 crc kubenswrapper[4911]: I0606 10:27:14.716424 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879"} err="failed to get container status \"e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879\": rpc error: code = NotFound desc = could not find container \"e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879\": container with ID starting with e2e126ef63f30e4b8642da06153fdef082ca01e496864d461cc0d1139fab9879 not found: ID does not exist" Jun 06 10:27:15 crc kubenswrapper[4911]: I0606 10:27:15.967520 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df733ed9-0740-4a42-8f4c-bbc66abafead" path="/var/lib/kubelet/pods/df733ed9-0740-4a42-8f4c-bbc66abafead/volumes" Jun 06 10:27:25 crc kubenswrapper[4911]: I0606 10:27:25.948673 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:27:25 crc kubenswrapper[4911]: E0606 10:27:25.949916 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:27:36 crc kubenswrapper[4911]: I0606 10:27:36.949248 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:27:36 crc kubenswrapper[4911]: E0606 10:27:36.950772 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:27:45 crc kubenswrapper[4911]: I0606 10:27:45.880571 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kbkl4"] Jun 06 10:27:45 crc kubenswrapper[4911]: E0606 10:27:45.882115 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df733ed9-0740-4a42-8f4c-bbc66abafead" containerName="container-00" Jun 06 10:27:45 crc kubenswrapper[4911]: I0606 10:27:45.882134 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="df733ed9-0740-4a42-8f4c-bbc66abafead" containerName="container-00" Jun 06 10:27:45 crc kubenswrapper[4911]: I0606 10:27:45.882422 4911 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="df733ed9-0740-4a42-8f4c-bbc66abafead" containerName="container-00" Jun 06 10:27:45 crc kubenswrapper[4911]: I0606 10:27:45.884264 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:45 crc kubenswrapper[4911]: I0606 10:27:45.893540 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kbkl4"] Jun 06 10:27:46 crc kubenswrapper[4911]: I0606 10:27:46.074760 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-utilities\") pod \"redhat-operators-kbkl4\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") " pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:46 crc kubenswrapper[4911]: I0606 10:27:46.075710 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b82tw\" (UniqueName: \"kubernetes.io/projected/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-kube-api-access-b82tw\") pod \"redhat-operators-kbkl4\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") " pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:46 crc kubenswrapper[4911]: I0606 10:27:46.075894 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-catalog-content\") pod \"redhat-operators-kbkl4\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") " pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:46 crc kubenswrapper[4911]: I0606 10:27:46.178315 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b82tw\" (UniqueName: \"kubernetes.io/projected/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-kube-api-access-b82tw\") pod \"redhat-operators-kbkl4\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") " pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:46 crc kubenswrapper[4911]: I0606 10:27:46.178458 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-catalog-content\") pod \"redhat-operators-kbkl4\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") " pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:46 crc kubenswrapper[4911]: I0606 10:27:46.178633 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-utilities\") pod \"redhat-operators-kbkl4\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") " pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:46 crc kubenswrapper[4911]: I0606 10:27:46.179231 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-utilities\") pod \"redhat-operators-kbkl4\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") " pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:46 crc kubenswrapper[4911]: I0606 10:27:46.179274 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-catalog-content\") pod \"redhat-operators-kbkl4\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") 
" pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:46 crc kubenswrapper[4911]: I0606 10:27:46.217613 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b82tw\" (UniqueName: \"kubernetes.io/projected/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-kube-api-access-b82tw\") pod \"redhat-operators-kbkl4\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") " pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:46 crc kubenswrapper[4911]: I0606 10:27:46.514869 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:47 crc kubenswrapper[4911]: I0606 10:27:47.330547 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kbkl4"] Jun 06 10:27:48 crc kubenswrapper[4911]: I0606 10:27:48.100321 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbkl4" event={"ID":"d1b60d44-533e-4c6c-9e8d-e2cc248e598d","Type":"ContainerStarted","Data":"0ab1c156bada9a01d122871770497b5c7c247ed8ff02b89824e4ebf2619dda56"} Jun 06 10:27:49 crc kubenswrapper[4911]: I0606 10:27:49.111842 4911 generic.go:334] "Generic (PLEG): container finished" podID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerID="8c05a412068d96b84a76eb50a13a88f662c2913f4b7f28eab80095222f84986b" exitCode=0 Jun 06 10:27:49 crc kubenswrapper[4911]: I0606 10:27:49.112015 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbkl4" event={"ID":"d1b60d44-533e-4c6c-9e8d-e2cc248e598d","Type":"ContainerDied","Data":"8c05a412068d96b84a76eb50a13a88f662c2913f4b7f28eab80095222f84986b"} Jun 06 10:27:50 crc kubenswrapper[4911]: I0606 10:27:50.948233 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:27:50 crc kubenswrapper[4911]: E0606 10:27:50.949009 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:27:52 crc kubenswrapper[4911]: I0606 10:27:52.153245 4911 generic.go:334] "Generic (PLEG): container finished" podID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerID="69330685461e9d290945911d406cae1f35a680cd5660edf7849722f002607818" exitCode=0 Jun 06 10:27:52 crc kubenswrapper[4911]: I0606 10:27:52.153485 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbkl4" event={"ID":"d1b60d44-533e-4c6c-9e8d-e2cc248e598d","Type":"ContainerDied","Data":"69330685461e9d290945911d406cae1f35a680cd5660edf7849722f002607818"} Jun 06 10:27:55 crc kubenswrapper[4911]: I0606 10:27:55.198077 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbkl4" event={"ID":"d1b60d44-533e-4c6c-9e8d-e2cc248e598d","Type":"ContainerStarted","Data":"1ef1db91324a7e25dc3fb1faf52a69ed9f39042a18694a7ca39d4f4070a4efd1"} Jun 06 10:27:55 crc kubenswrapper[4911]: I0606 10:27:55.238068 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kbkl4" podStartSLOduration=5.302686745 podStartE2EDuration="10.238034534s" 
podCreationTimestamp="2025-06-06 10:27:45 +0000 UTC" firstStartedPulling="2025-06-06 10:27:49.114834034 +0000 UTC m=+4480.390259577" lastFinishedPulling="2025-06-06 10:27:54.050181823 +0000 UTC m=+4485.325607366" observedRunningTime="2025-06-06 10:27:55.22736669 +0000 UTC m=+4486.502792233" watchObservedRunningTime="2025-06-06 10:27:55.238034534 +0000 UTC m=+4486.513460077" Jun 06 10:27:56 crc kubenswrapper[4911]: I0606 10:27:56.515328 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:56 crc kubenswrapper[4911]: I0606 10:27:56.515935 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:27:57 crc kubenswrapper[4911]: I0606 10:27:57.568577 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kbkl4" podUID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerName="registry-server" probeResult="failure" output=< Jun 06 10:27:57 crc kubenswrapper[4911]: timeout: failed to connect service ":50051" within 1s Jun 06 10:27:57 crc kubenswrapper[4911]: > Jun 06 10:28:01 crc kubenswrapper[4911]: I0606 10:28:01.489683 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-7dmgc"] Jun 06 10:28:01 crc kubenswrapper[4911]: I0606 10:28:01.491650 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-7dmgc" Jun 06 10:28:01 crc kubenswrapper[4911]: I0606 10:28:01.580563 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/23f1b779-49c2-4d9a-8a23-c870e790b3c4-host\") pod \"crc-debug-7dmgc\" (UID: \"23f1b779-49c2-4d9a-8a23-c870e790b3c4\") " pod="openstack/crc-debug-7dmgc" Jun 06 10:28:01 crc kubenswrapper[4911]: I0606 10:28:01.580724 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brj77\" (UniqueName: \"kubernetes.io/projected/23f1b779-49c2-4d9a-8a23-c870e790b3c4-kube-api-access-brj77\") pod \"crc-debug-7dmgc\" (UID: \"23f1b779-49c2-4d9a-8a23-c870e790b3c4\") " pod="openstack/crc-debug-7dmgc" Jun 06 10:28:01 crc kubenswrapper[4911]: I0606 10:28:01.683555 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/23f1b779-49c2-4d9a-8a23-c870e790b3c4-host\") pod \"crc-debug-7dmgc\" (UID: \"23f1b779-49c2-4d9a-8a23-c870e790b3c4\") " pod="openstack/crc-debug-7dmgc" Jun 06 10:28:01 crc kubenswrapper[4911]: I0606 10:28:01.683746 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/23f1b779-49c2-4d9a-8a23-c870e790b3c4-host\") pod \"crc-debug-7dmgc\" (UID: \"23f1b779-49c2-4d9a-8a23-c870e790b3c4\") " pod="openstack/crc-debug-7dmgc" Jun 06 10:28:01 crc kubenswrapper[4911]: I0606 10:28:01.683762 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brj77\" (UniqueName: \"kubernetes.io/projected/23f1b779-49c2-4d9a-8a23-c870e790b3c4-kube-api-access-brj77\") pod \"crc-debug-7dmgc\" (UID: \"23f1b779-49c2-4d9a-8a23-c870e790b3c4\") " pod="openstack/crc-debug-7dmgc" Jun 06 10:28:01 crc kubenswrapper[4911]: I0606 10:28:01.707008 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brj77\" (UniqueName: 
\"kubernetes.io/projected/23f1b779-49c2-4d9a-8a23-c870e790b3c4-kube-api-access-brj77\") pod \"crc-debug-7dmgc\" (UID: \"23f1b779-49c2-4d9a-8a23-c870e790b3c4\") " pod="openstack/crc-debug-7dmgc" Jun 06 10:28:01 crc kubenswrapper[4911]: I0606 10:28:01.814411 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-7dmgc" Jun 06 10:28:03 crc kubenswrapper[4911]: I0606 10:28:03.284394 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-7dmgc" event={"ID":"23f1b779-49c2-4d9a-8a23-c870e790b3c4","Type":"ContainerStarted","Data":"8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df"} Jun 06 10:28:03 crc kubenswrapper[4911]: I0606 10:28:03.285266 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-7dmgc" event={"ID":"23f1b779-49c2-4d9a-8a23-c870e790b3c4","Type":"ContainerStarted","Data":"7b269ea529384b5ee0730165c275ad2ff957078f025e8ec87787e2ae29c6f28a"} Jun 06 10:28:03 crc kubenswrapper[4911]: I0606 10:28:03.306474 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-7dmgc" podStartSLOduration=2.306451328 podStartE2EDuration="2.306451328s" podCreationTimestamp="2025-06-06 10:28:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:28:03.301429108 +0000 UTC m=+4494.576854651" watchObservedRunningTime="2025-06-06 10:28:03.306451328 +0000 UTC m=+4494.581876881" Jun 06 10:28:05 crc kubenswrapper[4911]: I0606 10:28:05.948745 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:28:05 crc kubenswrapper[4911]: E0606 10:28:05.949708 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:28:06 crc kubenswrapper[4911]: I0606 10:28:06.601517 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:28:06 crc kubenswrapper[4911]: I0606 10:28:06.677634 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:28:06 crc kubenswrapper[4911]: I0606 10:28:06.848127 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kbkl4"] Jun 06 10:28:08 crc kubenswrapper[4911]: I0606 10:28:08.338796 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kbkl4" podUID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerName="registry-server" containerID="cri-o://1ef1db91324a7e25dc3fb1faf52a69ed9f39042a18694a7ca39d4f4070a4efd1" gracePeriod=2 Jun 06 10:28:09 crc kubenswrapper[4911]: I0606 10:28:09.358434 4911 generic.go:334] "Generic (PLEG): container finished" podID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerID="1ef1db91324a7e25dc3fb1faf52a69ed9f39042a18694a7ca39d4f4070a4efd1" exitCode=0 Jun 06 10:28:09 crc kubenswrapper[4911]: I0606 10:28:09.358547 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbkl4" 
event={"ID":"d1b60d44-533e-4c6c-9e8d-e2cc248e598d","Type":"ContainerDied","Data":"1ef1db91324a7e25dc3fb1faf52a69ed9f39042a18694a7ca39d4f4070a4efd1"} Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.369662 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbkl4" event={"ID":"d1b60d44-533e-4c6c-9e8d-e2cc248e598d","Type":"ContainerDied","Data":"0ab1c156bada9a01d122871770497b5c7c247ed8ff02b89824e4ebf2619dda56"} Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.370109 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ab1c156bada9a01d122871770497b5c7c247ed8ff02b89824e4ebf2619dda56" Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.391275 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.436400 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-utilities\") pod \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") " Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.436492 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-catalog-content\") pod \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") " Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.436778 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b82tw\" (UniqueName: \"kubernetes.io/projected/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-kube-api-access-b82tw\") pod \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\" (UID: \"d1b60d44-533e-4c6c-9e8d-e2cc248e598d\") " Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.437561 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-utilities" (OuterVolumeSpecName: "utilities") pod "d1b60d44-533e-4c6c-9e8d-e2cc248e598d" (UID: "d1b60d44-533e-4c6c-9e8d-e2cc248e598d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.445512 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-kube-api-access-b82tw" (OuterVolumeSpecName: "kube-api-access-b82tw") pod "d1b60d44-533e-4c6c-9e8d-e2cc248e598d" (UID: "d1b60d44-533e-4c6c-9e8d-e2cc248e598d"). InnerVolumeSpecName "kube-api-access-b82tw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.514289 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d1b60d44-533e-4c6c-9e8d-e2cc248e598d" (UID: "d1b60d44-533e-4c6c-9e8d-e2cc248e598d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.540278 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.540329 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:28:10 crc kubenswrapper[4911]: I0606 10:28:10.540344 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b82tw\" (UniqueName: \"kubernetes.io/projected/d1b60d44-533e-4c6c-9e8d-e2cc248e598d-kube-api-access-b82tw\") on node \"crc\" DevicePath \"\"" Jun 06 10:28:11 crc kubenswrapper[4911]: I0606 10:28:11.380235 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kbkl4" Jun 06 10:28:11 crc kubenswrapper[4911]: I0606 10:28:11.426027 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kbkl4"] Jun 06 10:28:11 crc kubenswrapper[4911]: I0606 10:28:11.435040 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kbkl4"] Jun 06 10:28:11 crc kubenswrapper[4911]: I0606 10:28:11.972358 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" path="/var/lib/kubelet/pods/d1b60d44-533e-4c6c-9e8d-e2cc248e598d/volumes" Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.214290 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-7dmgc"] Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.215124 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-7dmgc" podUID="23f1b779-49c2-4d9a-8a23-c870e790b3c4" containerName="container-00" containerID="cri-o://8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df" gracePeriod=2 Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.229839 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-7dmgc"] Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.339233 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-7dmgc" Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.408016 4911 generic.go:334] "Generic (PLEG): container finished" podID="23f1b779-49c2-4d9a-8a23-c870e790b3c4" containerID="8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df" exitCode=0 Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.408134 4911 scope.go:117] "RemoveContainer" containerID="8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df" Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.408318 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-7dmgc" Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.416950 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/23f1b779-49c2-4d9a-8a23-c870e790b3c4-host\") pod \"23f1b779-49c2-4d9a-8a23-c870e790b3c4\" (UID: \"23f1b779-49c2-4d9a-8a23-c870e790b3c4\") " Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.417134 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brj77\" (UniqueName: \"kubernetes.io/projected/23f1b779-49c2-4d9a-8a23-c870e790b3c4-kube-api-access-brj77\") pod \"23f1b779-49c2-4d9a-8a23-c870e790b3c4\" (UID: \"23f1b779-49c2-4d9a-8a23-c870e790b3c4\") " Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.417221 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/23f1b779-49c2-4d9a-8a23-c870e790b3c4-host" (OuterVolumeSpecName: "host") pod "23f1b779-49c2-4d9a-8a23-c870e790b3c4" (UID: "23f1b779-49c2-4d9a-8a23-c870e790b3c4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.417914 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/23f1b779-49c2-4d9a-8a23-c870e790b3c4-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.427838 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23f1b779-49c2-4d9a-8a23-c870e790b3c4-kube-api-access-brj77" (OuterVolumeSpecName: "kube-api-access-brj77") pod "23f1b779-49c2-4d9a-8a23-c870e790b3c4" (UID: "23f1b779-49c2-4d9a-8a23-c870e790b3c4"). InnerVolumeSpecName "kube-api-access-brj77". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.445555 4911 scope.go:117] "RemoveContainer" containerID="8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df" Jun 06 10:28:13 crc kubenswrapper[4911]: E0606 10:28:13.446396 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df\": container with ID starting with 8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df not found: ID does not exist" containerID="8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df" Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.446438 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df"} err="failed to get container status \"8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df\": rpc error: code = NotFound desc = could not find container \"8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df\": container with ID starting with 8170803c00844d60defec916cdbe79fd0bd520d39fe99350c7324a5384b785df not found: ID does not exist" Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.519811 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brj77\" (UniqueName: \"kubernetes.io/projected/23f1b779-49c2-4d9a-8a23-c870e790b3c4-kube-api-access-brj77\") on node \"crc\" DevicePath \"\"" Jun 06 10:28:13 crc kubenswrapper[4911]: I0606 10:28:13.963437 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23f1b779-49c2-4d9a-8a23-c870e790b3c4" path="/var/lib/kubelet/pods/23f1b779-49c2-4d9a-8a23-c870e790b3c4/volumes" Jun 06 10:28:20 crc kubenswrapper[4911]: I0606 10:28:20.948480 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:28:20 crc kubenswrapper[4911]: E0606 10:28:20.949477 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:28:22 crc kubenswrapper[4911]: I0606 10:28:22.587424 4911 scope.go:117] "RemoveContainer" containerID="dcb43cdd4d1a7dfcb7586faca260b51ea75379e5a9f84853bbbfee1058396429" Jun 06 10:28:31 crc kubenswrapper[4911]: I0606 10:28:31.948850 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:28:31 crc kubenswrapper[4911]: E0606 10:28:31.950477 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:28:43 crc kubenswrapper[4911]: I0606 10:28:43.948183 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:28:43 crc 
kubenswrapper[4911]: E0606 10:28:43.948996 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:28:56 crc kubenswrapper[4911]: I0606 10:28:56.949295 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:28:56 crc kubenswrapper[4911]: E0606 10:28:56.950374 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.633120 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-thnk8"] Jun 06 10:29:01 crc kubenswrapper[4911]: E0606 10:29:01.634602 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerName="registry-server" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.634628 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerName="registry-server" Jun 06 10:29:01 crc kubenswrapper[4911]: E0606 10:29:01.634650 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23f1b779-49c2-4d9a-8a23-c870e790b3c4" containerName="container-00" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.634656 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="23f1b779-49c2-4d9a-8a23-c870e790b3c4" containerName="container-00" Jun 06 10:29:01 crc kubenswrapper[4911]: E0606 10:29:01.634672 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerName="extract-utilities" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.634680 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerName="extract-utilities" Jun 06 10:29:01 crc kubenswrapper[4911]: E0606 10:29:01.634726 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerName="extract-content" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.634733 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerName="extract-content" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.635005 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1b60d44-533e-4c6c-9e8d-e2cc248e598d" containerName="registry-server" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.635034 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="23f1b779-49c2-4d9a-8a23-c870e790b3c4" containerName="container-00" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.636084 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-thnk8" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.766594 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-host\") pod \"crc-debug-thnk8\" (UID: \"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859\") " pod="openstack/crc-debug-thnk8" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.766759 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwlph\" (UniqueName: \"kubernetes.io/projected/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-kube-api-access-jwlph\") pod \"crc-debug-thnk8\" (UID: \"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859\") " pod="openstack/crc-debug-thnk8" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.868819 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-host\") pod \"crc-debug-thnk8\" (UID: \"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859\") " pod="openstack/crc-debug-thnk8" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.868973 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwlph\" (UniqueName: \"kubernetes.io/projected/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-kube-api-access-jwlph\") pod \"crc-debug-thnk8\" (UID: \"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859\") " pod="openstack/crc-debug-thnk8" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.869008 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-host\") pod \"crc-debug-thnk8\" (UID: \"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859\") " pod="openstack/crc-debug-thnk8" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.893797 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwlph\" (UniqueName: \"kubernetes.io/projected/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-kube-api-access-jwlph\") pod \"crc-debug-thnk8\" (UID: \"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859\") " pod="openstack/crc-debug-thnk8" Jun 06 10:29:01 crc kubenswrapper[4911]: I0606 10:29:01.960659 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-thnk8" Jun 06 10:29:02 crc kubenswrapper[4911]: I0606 10:29:02.926627 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-thnk8" event={"ID":"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859","Type":"ContainerStarted","Data":"904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d"} Jun 06 10:29:02 crc kubenswrapper[4911]: I0606 10:29:02.928293 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-thnk8" event={"ID":"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859","Type":"ContainerStarted","Data":"42ead224aefee4190fc32e3f4a3e8b2d902d1aa69769c5839d9358f65c646cc5"} Jun 06 10:29:02 crc kubenswrapper[4911]: I0606 10:29:02.958695 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-thnk8" podStartSLOduration=1.958666827 podStartE2EDuration="1.958666827s" podCreationTimestamp="2025-06-06 10:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:29:02.943424796 +0000 UTC m=+4554.218850349" watchObservedRunningTime="2025-06-06 10:29:02.958666827 +0000 UTC m=+4554.234092370" Jun 06 10:29:11 crc kubenswrapper[4911]: I0606 10:29:11.949220 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:29:11 crc kubenswrapper[4911]: E0606 10:29:11.950581 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:29:12 crc kubenswrapper[4911]: I0606 10:29:12.684760 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-thnk8"] Jun 06 10:29:12 crc kubenswrapper[4911]: I0606 10:29:12.685944 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-thnk8" podUID="84ddbc1b-62c4-40f6-b1fd-8d3626bc1859" containerName="container-00" containerID="cri-o://904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d" gracePeriod=2 Jun 06 10:29:12 crc kubenswrapper[4911]: I0606 10:29:12.710399 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-thnk8"] Jun 06 10:29:12 crc kubenswrapper[4911]: I0606 10:29:12.814891 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-thnk8" Jun 06 10:29:12 crc kubenswrapper[4911]: I0606 10:29:12.902031 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwlph\" (UniqueName: \"kubernetes.io/projected/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-kube-api-access-jwlph\") pod \"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859\" (UID: \"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859\") " Jun 06 10:29:12 crc kubenswrapper[4911]: I0606 10:29:12.902313 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-host\") pod \"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859\" (UID: \"84ddbc1b-62c4-40f6-b1fd-8d3626bc1859\") " Jun 06 10:29:12 crc kubenswrapper[4911]: I0606 10:29:12.902442 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-host" (OuterVolumeSpecName: "host") pod "84ddbc1b-62c4-40f6-b1fd-8d3626bc1859" (UID: "84ddbc1b-62c4-40f6-b1fd-8d3626bc1859"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:29:12 crc kubenswrapper[4911]: I0606 10:29:12.902943 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:29:12 crc kubenswrapper[4911]: I0606 10:29:12.910790 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-kube-api-access-jwlph" (OuterVolumeSpecName: "kube-api-access-jwlph") pod "84ddbc1b-62c4-40f6-b1fd-8d3626bc1859" (UID: "84ddbc1b-62c4-40f6-b1fd-8d3626bc1859"). InnerVolumeSpecName "kube-api-access-jwlph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:29:13 crc kubenswrapper[4911]: I0606 10:29:13.005670 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwlph\" (UniqueName: \"kubernetes.io/projected/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859-kube-api-access-jwlph\") on node \"crc\" DevicePath \"\"" Jun 06 10:29:13 crc kubenswrapper[4911]: I0606 10:29:13.059650 4911 generic.go:334] "Generic (PLEG): container finished" podID="84ddbc1b-62c4-40f6-b1fd-8d3626bc1859" containerID="904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d" exitCode=0 Jun 06 10:29:13 crc kubenswrapper[4911]: I0606 10:29:13.059762 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-thnk8" Jun 06 10:29:13 crc kubenswrapper[4911]: I0606 10:29:13.059773 4911 scope.go:117] "RemoveContainer" containerID="904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d" Jun 06 10:29:13 crc kubenswrapper[4911]: I0606 10:29:13.091462 4911 scope.go:117] "RemoveContainer" containerID="904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d" Jun 06 10:29:13 crc kubenswrapper[4911]: E0606 10:29:13.092154 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d\": container with ID starting with 904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d not found: ID does not exist" containerID="904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d" Jun 06 10:29:13 crc kubenswrapper[4911]: I0606 10:29:13.092224 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d"} err="failed to get container status \"904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d\": rpc error: code = NotFound desc = could not find container \"904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d\": container with ID starting with 904ff336015349b79cd4d4cb53025a0b5b219ec3b6b3a5d7df7468cb2617580d not found: ID does not exist" Jun 06 10:29:13 crc kubenswrapper[4911]: I0606 10:29:13.978502 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84ddbc1b-62c4-40f6-b1fd-8d3626bc1859" path="/var/lib/kubelet/pods/84ddbc1b-62c4-40f6-b1fd-8d3626bc1859/volumes" Jun 06 10:29:22 crc kubenswrapper[4911]: I0606 10:29:22.949721 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:29:22 crc kubenswrapper[4911]: E0606 10:29:22.950886 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:29:37 crc kubenswrapper[4911]: I0606 10:29:37.947927 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:29:37 crc kubenswrapper[4911]: E0606 10:29:37.950728 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:29:50 crc kubenswrapper[4911]: I0606 10:29:50.948289 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:29:50 crc kubenswrapper[4911]: E0606 10:29:50.949334 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.174037 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw"] Jun 06 10:30:00 crc kubenswrapper[4911]: E0606 10:30:00.175255 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ddbc1b-62c4-40f6-b1fd-8d3626bc1859" containerName="container-00" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.175271 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ddbc1b-62c4-40f6-b1fd-8d3626bc1859" containerName="container-00" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.175549 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ddbc1b-62c4-40f6-b1fd-8d3626bc1859" containerName="container-00" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.176483 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.181082 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.181202 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.189813 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw"] Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.282955 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/de536cbf-2e68-403a-99fb-825113324139-config-volume\") pod \"collect-profiles-29153430-p5jsw\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.283816 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/de536cbf-2e68-403a-99fb-825113324139-secret-volume\") pod \"collect-profiles-29153430-p5jsw\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.284209 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpj8x\" (UniqueName: \"kubernetes.io/projected/de536cbf-2e68-403a-99fb-825113324139-kube-api-access-jpj8x\") pod \"collect-profiles-29153430-p5jsw\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.386845 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpj8x\" (UniqueName: \"kubernetes.io/projected/de536cbf-2e68-403a-99fb-825113324139-kube-api-access-jpj8x\") pod \"collect-profiles-29153430-p5jsw\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.386996 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/de536cbf-2e68-403a-99fb-825113324139-config-volume\") pod \"collect-profiles-29153430-p5jsw\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.387122 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/de536cbf-2e68-403a-99fb-825113324139-secret-volume\") pod \"collect-profiles-29153430-p5jsw\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.388531 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/de536cbf-2e68-403a-99fb-825113324139-config-volume\") pod \"collect-profiles-29153430-p5jsw\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.438279 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/de536cbf-2e68-403a-99fb-825113324139-secret-volume\") pod \"collect-profiles-29153430-p5jsw\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.438339 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpj8x\" (UniqueName: \"kubernetes.io/projected/de536cbf-2e68-403a-99fb-825113324139-kube-api-access-jpj8x\") pod \"collect-profiles-29153430-p5jsw\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:00 crc kubenswrapper[4911]: I0606 10:30:00.513994 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:01 crc kubenswrapper[4911]: I0606 10:30:01.220868 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw"] Jun 06 10:30:01 crc kubenswrapper[4911]: I0606 10:30:01.642738 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" event={"ID":"de536cbf-2e68-403a-99fb-825113324139","Type":"ContainerStarted","Data":"36fe398d8c3e70add4a36588a0eaa97ccac6690a2db6650a4607baefa0a5db0e"} Jun 06 10:30:01 crc kubenswrapper[4911]: I0606 10:30:01.643307 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" event={"ID":"de536cbf-2e68-403a-99fb-825113324139","Type":"ContainerStarted","Data":"fdd1c449b3fad915d856cbaaab854a058a45c78bc1106cf1d0a68d4463d52158"} Jun 06 10:30:01 crc kubenswrapper[4911]: I0606 10:30:01.667758 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" podStartSLOduration=1.667736391 podStartE2EDuration="1.667736391s" podCreationTimestamp="2025-06-06 10:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:30:01.661337926 +0000 UTC m=+4612.936763479" watchObservedRunningTime="2025-06-06 10:30:01.667736391 +0000 UTC m=+4612.943161934" Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.038230 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-d6bkh"] Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.041265 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-d6bkh" Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.138862 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dch88\" (UniqueName: \"kubernetes.io/projected/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-kube-api-access-dch88\") pod \"crc-debug-d6bkh\" (UID: \"29c00f0f-32d1-4797-b1a6-a6c82b01bb69\") " pod="openstack/crc-debug-d6bkh" Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.139533 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-host\") pod \"crc-debug-d6bkh\" (UID: \"29c00f0f-32d1-4797-b1a6-a6c82b01bb69\") " pod="openstack/crc-debug-d6bkh" Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.242330 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dch88\" (UniqueName: \"kubernetes.io/projected/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-kube-api-access-dch88\") pod \"crc-debug-d6bkh\" (UID: \"29c00f0f-32d1-4797-b1a6-a6c82b01bb69\") " pod="openstack/crc-debug-d6bkh" Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.242613 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-host\") pod \"crc-debug-d6bkh\" (UID: \"29c00f0f-32d1-4797-b1a6-a6c82b01bb69\") " pod="openstack/crc-debug-d6bkh" Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.242779 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-host\") pod \"crc-debug-d6bkh\" (UID: \"29c00f0f-32d1-4797-b1a6-a6c82b01bb69\") " pod="openstack/crc-debug-d6bkh" Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.266009 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dch88\" (UniqueName: \"kubernetes.io/projected/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-kube-api-access-dch88\") pod \"crc-debug-d6bkh\" (UID: \"29c00f0f-32d1-4797-b1a6-a6c82b01bb69\") " pod="openstack/crc-debug-d6bkh" Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.365525 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-d6bkh" Jun 06 10:30:02 crc kubenswrapper[4911]: W0606 10:30:02.424621 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29c00f0f_32d1_4797_b1a6_a6c82b01bb69.slice/crio-7da73a67aea949cad26b2e4c1fb06b3bfe4cd1d5071d015b1167374d096e27d3 WatchSource:0}: Error finding container 7da73a67aea949cad26b2e4c1fb06b3bfe4cd1d5071d015b1167374d096e27d3: Status 404 returned error can't find the container with id 7da73a67aea949cad26b2e4c1fb06b3bfe4cd1d5071d015b1167374d096e27d3 Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.655158 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-d6bkh" event={"ID":"29c00f0f-32d1-4797-b1a6-a6c82b01bb69","Type":"ContainerStarted","Data":"7da73a67aea949cad26b2e4c1fb06b3bfe4cd1d5071d015b1167374d096e27d3"} Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.658358 4911 generic.go:334] "Generic (PLEG): container finished" podID="de536cbf-2e68-403a-99fb-825113324139" containerID="36fe398d8c3e70add4a36588a0eaa97ccac6690a2db6650a4607baefa0a5db0e" exitCode=0 Jun 06 10:30:02 crc kubenswrapper[4911]: I0606 10:30:02.658402 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" event={"ID":"de536cbf-2e68-403a-99fb-825113324139","Type":"ContainerDied","Data":"36fe398d8c3e70add4a36588a0eaa97ccac6690a2db6650a4607baefa0a5db0e"} Jun 06 10:30:03 crc kubenswrapper[4911]: I0606 10:30:03.674592 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-d6bkh" event={"ID":"29c00f0f-32d1-4797-b1a6-a6c82b01bb69","Type":"ContainerStarted","Data":"ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064"} Jun 06 10:30:03 crc kubenswrapper[4911]: I0606 10:30:03.698144 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-d6bkh" podStartSLOduration=1.698114092 podStartE2EDuration="1.698114092s" podCreationTimestamp="2025-06-06 10:30:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:30:03.68869391 +0000 UTC m=+4614.964119463" watchObservedRunningTime="2025-06-06 10:30:03.698114092 +0000 UTC m=+4614.973539635" Jun 06 10:30:04 crc kubenswrapper[4911]: I0606 10:30:04.878330 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:04 crc kubenswrapper[4911]: I0606 10:30:04.923843 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/de536cbf-2e68-403a-99fb-825113324139-config-volume\") pod \"de536cbf-2e68-403a-99fb-825113324139\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " Jun 06 10:30:04 crc kubenswrapper[4911]: I0606 10:30:04.923944 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpj8x\" (UniqueName: \"kubernetes.io/projected/de536cbf-2e68-403a-99fb-825113324139-kube-api-access-jpj8x\") pod \"de536cbf-2e68-403a-99fb-825113324139\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " Jun 06 10:30:04 crc kubenswrapper[4911]: I0606 10:30:04.924052 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/de536cbf-2e68-403a-99fb-825113324139-secret-volume\") pod \"de536cbf-2e68-403a-99fb-825113324139\" (UID: \"de536cbf-2e68-403a-99fb-825113324139\") " Jun 06 10:30:04 crc kubenswrapper[4911]: I0606 10:30:04.925356 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de536cbf-2e68-403a-99fb-825113324139-config-volume" (OuterVolumeSpecName: "config-volume") pod "de536cbf-2e68-403a-99fb-825113324139" (UID: "de536cbf-2e68-403a-99fb-825113324139"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 10:30:04 crc kubenswrapper[4911]: I0606 10:30:04.932334 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de536cbf-2e68-403a-99fb-825113324139-kube-api-access-jpj8x" (OuterVolumeSpecName: "kube-api-access-jpj8x") pod "de536cbf-2e68-403a-99fb-825113324139" (UID: "de536cbf-2e68-403a-99fb-825113324139"). InnerVolumeSpecName "kube-api-access-jpj8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:30:04 crc kubenswrapper[4911]: I0606 10:30:04.932506 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de536cbf-2e68-403a-99fb-825113324139-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "de536cbf-2e68-403a-99fb-825113324139" (UID: "de536cbf-2e68-403a-99fb-825113324139"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 10:30:05 crc kubenswrapper[4911]: I0606 10:30:05.026350 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/de536cbf-2e68-403a-99fb-825113324139-config-volume\") on node \"crc\" DevicePath \"\"" Jun 06 10:30:05 crc kubenswrapper[4911]: I0606 10:30:05.026410 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpj8x\" (UniqueName: \"kubernetes.io/projected/de536cbf-2e68-403a-99fb-825113324139-kube-api-access-jpj8x\") on node \"crc\" DevicePath \"\"" Jun 06 10:30:05 crc kubenswrapper[4911]: I0606 10:30:05.026427 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/de536cbf-2e68-403a-99fb-825113324139-secret-volume\") on node \"crc\" DevicePath \"\"" Jun 06 10:30:05 crc kubenswrapper[4911]: I0606 10:30:05.698792 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" event={"ID":"de536cbf-2e68-403a-99fb-825113324139","Type":"ContainerDied","Data":"fdd1c449b3fad915d856cbaaab854a058a45c78bc1106cf1d0a68d4463d52158"} Jun 06 10:30:05 crc kubenswrapper[4911]: I0606 10:30:05.699344 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdd1c449b3fad915d856cbaaab854a058a45c78bc1106cf1d0a68d4463d52158" Jun 06 10:30:05 crc kubenswrapper[4911]: I0606 10:30:05.698879 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153430-p5jsw" Jun 06 10:30:05 crc kubenswrapper[4911]: I0606 10:30:05.948533 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:30:05 crc kubenswrapper[4911]: I0606 10:30:05.974464 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6"] Jun 06 10:30:05 crc kubenswrapper[4911]: I0606 10:30:05.986210 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153385-9gbp6"] Jun 06 10:30:06 crc kubenswrapper[4911]: I0606 10:30:06.713511 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"8718fb62374e5accd6e02ad687ffd15b81dad7742fe81c95f29f54f27db70ac9"} Jun 06 10:30:07 crc kubenswrapper[4911]: I0606 10:30:07.960159 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="290d083a-2027-4f99-b6ab-6432beb132f6" path="/var/lib/kubelet/pods/290d083a-2027-4f99-b6ab-6432beb132f6/volumes" Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.039465 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-d6bkh"] Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.041519 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-d6bkh" podUID="29c00f0f-32d1-4797-b1a6-a6c82b01bb69" containerName="container-00" containerID="cri-o://ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064" gracePeriod=2 Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.057789 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-d6bkh"] Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.139546 4911 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/crc-debug-d6bkh" Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.166518 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-host\") pod \"29c00f0f-32d1-4797-b1a6-a6c82b01bb69\" (UID: \"29c00f0f-32d1-4797-b1a6-a6c82b01bb69\") " Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.166581 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dch88\" (UniqueName: \"kubernetes.io/projected/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-kube-api-access-dch88\") pod \"29c00f0f-32d1-4797-b1a6-a6c82b01bb69\" (UID: \"29c00f0f-32d1-4797-b1a6-a6c82b01bb69\") " Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.166651 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-host" (OuterVolumeSpecName: "host") pod "29c00f0f-32d1-4797-b1a6-a6c82b01bb69" (UID: "29c00f0f-32d1-4797-b1a6-a6c82b01bb69"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.166899 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.176044 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-kube-api-access-dch88" (OuterVolumeSpecName: "kube-api-access-dch88") pod "29c00f0f-32d1-4797-b1a6-a6c82b01bb69" (UID: "29c00f0f-32d1-4797-b1a6-a6c82b01bb69"). InnerVolumeSpecName "kube-api-access-dch88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.268013 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dch88\" (UniqueName: \"kubernetes.io/projected/29c00f0f-32d1-4797-b1a6-a6c82b01bb69-kube-api-access-dch88\") on node \"crc\" DevicePath \"\"" Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.789669 4911 generic.go:334] "Generic (PLEG): container finished" podID="29c00f0f-32d1-4797-b1a6-a6c82b01bb69" containerID="ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064" exitCode=0 Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.789746 4911 scope.go:117] "RemoveContainer" containerID="ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064" Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.789797 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-d6bkh" Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.821856 4911 scope.go:117] "RemoveContainer" containerID="ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064" Jun 06 10:30:13 crc kubenswrapper[4911]: E0606 10:30:13.822337 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064\": container with ID starting with ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064 not found: ID does not exist" containerID="ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064" Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.822376 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064"} err="failed to get container status \"ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064\": rpc error: code = NotFound desc = could not find container \"ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064\": container with ID starting with ad4218014d84fa5171afe561862683025453520c655b2d9c2a097f09f1e1e064 not found: ID does not exist" Jun 06 10:30:13 crc kubenswrapper[4911]: I0606 10:30:13.967684 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29c00f0f-32d1-4797-b1a6-a6c82b01bb69" path="/var/lib/kubelet/pods/29c00f0f-32d1-4797-b1a6-a6c82b01bb69/volumes" Jun 06 10:30:22 crc kubenswrapper[4911]: I0606 10:30:22.720744 4911 scope.go:117] "RemoveContainer" containerID="afac12540d29e80df37169b6793b23c27ae049cec25944b35fe2e70bbbfb985f" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.500717 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vsx7j"] Jun 06 10:30:28 crc kubenswrapper[4911]: E0606 10:30:28.502061 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de536cbf-2e68-403a-99fb-825113324139" containerName="collect-profiles" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.502080 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="de536cbf-2e68-403a-99fb-825113324139" containerName="collect-profiles" Jun 06 10:30:28 crc kubenswrapper[4911]: E0606 10:30:28.502140 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c00f0f-32d1-4797-b1a6-a6c82b01bb69" containerName="container-00" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.502147 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c00f0f-32d1-4797-b1a6-a6c82b01bb69" containerName="container-00" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.502379 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="de536cbf-2e68-403a-99fb-825113324139" containerName="collect-profiles" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.502397 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="29c00f0f-32d1-4797-b1a6-a6c82b01bb69" containerName="container-00" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.504072 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.513365 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vsx7j"] Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.553588 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-catalog-content\") pod \"certified-operators-vsx7j\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.553678 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfvsz\" (UniqueName: \"kubernetes.io/projected/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-kube-api-access-gfvsz\") pod \"certified-operators-vsx7j\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.553861 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-utilities\") pod \"certified-operators-vsx7j\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.655549 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-utilities\") pod \"certified-operators-vsx7j\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.655646 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-catalog-content\") pod \"certified-operators-vsx7j\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.655683 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfvsz\" (UniqueName: \"kubernetes.io/projected/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-kube-api-access-gfvsz\") pod \"certified-operators-vsx7j\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.656525 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-utilities\") pod \"certified-operators-vsx7j\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.656752 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-catalog-content\") pod \"certified-operators-vsx7j\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.682549 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gfvsz\" (UniqueName: \"kubernetes.io/projected/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-kube-api-access-gfvsz\") pod \"certified-operators-vsx7j\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:28 crc kubenswrapper[4911]: I0606 10:30:28.869902 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:29 crc kubenswrapper[4911]: I0606 10:30:29.568644 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vsx7j"] Jun 06 10:30:29 crc kubenswrapper[4911]: I0606 10:30:29.989020 4911 generic.go:334] "Generic (PLEG): container finished" podID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" containerID="90b35623e45c4e002433ea1065746fb4e76c5d114fb734593defb708b596657f" exitCode=0 Jun 06 10:30:29 crc kubenswrapper[4911]: I0606 10:30:29.989111 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vsx7j" event={"ID":"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e","Type":"ContainerDied","Data":"90b35623e45c4e002433ea1065746fb4e76c5d114fb734593defb708b596657f"} Jun 06 10:30:29 crc kubenswrapper[4911]: I0606 10:30:29.989434 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vsx7j" event={"ID":"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e","Type":"ContainerStarted","Data":"54661ddf359e9f329e95da623da7ba1c07cb4dc1a3c5229a582fe59be398dee1"} Jun 06 10:30:32 crc kubenswrapper[4911]: I0606 10:30:32.018267 4911 generic.go:334] "Generic (PLEG): container finished" podID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" containerID="3a3a004dd7c02914d325911b107c013bae6ee5dcd996c29de7c14a309c04a48c" exitCode=0 Jun 06 10:30:32 crc kubenswrapper[4911]: I0606 10:30:32.018342 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vsx7j" event={"ID":"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e","Type":"ContainerDied","Data":"3a3a004dd7c02914d325911b107c013bae6ee5dcd996c29de7c14a309c04a48c"} Jun 06 10:30:33 crc kubenswrapper[4911]: I0606 10:30:33.032681 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vsx7j" event={"ID":"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e","Type":"ContainerStarted","Data":"4885ba43451a0ed5f33913f631d61ff86973a453d3da33e9121e362b669b8d46"} Jun 06 10:30:33 crc kubenswrapper[4911]: I0606 10:30:33.053154 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vsx7j" podStartSLOduration=2.536918902 podStartE2EDuration="5.053125315s" podCreationTimestamp="2025-06-06 10:30:28 +0000 UTC" firstStartedPulling="2025-06-06 10:30:29.994766413 +0000 UTC m=+4641.270191956" lastFinishedPulling="2025-06-06 10:30:32.510972826 +0000 UTC m=+4643.786398369" observedRunningTime="2025-06-06 10:30:33.052022346 +0000 UTC m=+4644.327447919" watchObservedRunningTime="2025-06-06 10:30:33.053125315 +0000 UTC m=+4644.328550858" Jun 06 10:30:38 crc kubenswrapper[4911]: I0606 10:30:38.870834 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:38 crc kubenswrapper[4911]: I0606 10:30:38.871776 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:38 crc kubenswrapper[4911]: I0606 10:30:38.934622 4911 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:39 crc kubenswrapper[4911]: I0606 10:30:39.163915 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:39 crc kubenswrapper[4911]: I0606 10:30:39.224463 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vsx7j"] Jun 06 10:30:41 crc kubenswrapper[4911]: I0606 10:30:41.129221 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vsx7j" podUID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" containerName="registry-server" containerID="cri-o://4885ba43451a0ed5f33913f631d61ff86973a453d3da33e9121e362b669b8d46" gracePeriod=2 Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.143848 4911 generic.go:334] "Generic (PLEG): container finished" podID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" containerID="4885ba43451a0ed5f33913f631d61ff86973a453d3da33e9121e362b669b8d46" exitCode=0 Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.143922 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vsx7j" event={"ID":"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e","Type":"ContainerDied","Data":"4885ba43451a0ed5f33913f631d61ff86973a453d3da33e9121e362b669b8d46"} Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.579604 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.700213 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-catalog-content\") pod \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.700385 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-utilities\") pod \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.700710 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfvsz\" (UniqueName: \"kubernetes.io/projected/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-kube-api-access-gfvsz\") pod \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\" (UID: \"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e\") " Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.701254 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-utilities" (OuterVolumeSpecName: "utilities") pod "0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" (UID: "0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.701913 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.710351 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-kube-api-access-gfvsz" (OuterVolumeSpecName: "kube-api-access-gfvsz") pod "0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" (UID: "0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e"). InnerVolumeSpecName "kube-api-access-gfvsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.732668 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" (UID: "0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.802838 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfvsz\" (UniqueName: \"kubernetes.io/projected/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-kube-api-access-gfvsz\") on node \"crc\" DevicePath \"\"" Jun 06 10:30:42 crc kubenswrapper[4911]: I0606 10:30:42.802886 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:30:43 crc kubenswrapper[4911]: I0606 10:30:43.157465 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vsx7j" event={"ID":"0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e","Type":"ContainerDied","Data":"54661ddf359e9f329e95da623da7ba1c07cb4dc1a3c5229a582fe59be398dee1"} Jun 06 10:30:43 crc kubenswrapper[4911]: I0606 10:30:43.158767 4911 scope.go:117] "RemoveContainer" containerID="4885ba43451a0ed5f33913f631d61ff86973a453d3da33e9121e362b669b8d46" Jun 06 10:30:43 crc kubenswrapper[4911]: I0606 10:30:43.157612 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vsx7j" Jun 06 10:30:43 crc kubenswrapper[4911]: I0606 10:30:43.190866 4911 scope.go:117] "RemoveContainer" containerID="3a3a004dd7c02914d325911b107c013bae6ee5dcd996c29de7c14a309c04a48c" Jun 06 10:30:43 crc kubenswrapper[4911]: I0606 10:30:43.201940 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vsx7j"] Jun 06 10:30:43 crc kubenswrapper[4911]: I0606 10:30:43.213569 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vsx7j"] Jun 06 10:30:43 crc kubenswrapper[4911]: I0606 10:30:43.237605 4911 scope.go:117] "RemoveContainer" containerID="90b35623e45c4e002433ea1065746fb4e76c5d114fb734593defb708b596657f" Jun 06 10:30:43 crc kubenswrapper[4911]: I0606 10:30:43.962996 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" path="/var/lib/kubelet/pods/0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e/volumes" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.431451 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-fnzrn"] Jun 06 10:31:01 crc kubenswrapper[4911]: E0606 10:31:01.432688 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" containerName="extract-utilities" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.432709 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" containerName="extract-utilities" Jun 06 10:31:01 crc kubenswrapper[4911]: E0606 10:31:01.432735 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" containerName="extract-content" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.432741 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" containerName="extract-content" Jun 06 10:31:01 crc kubenswrapper[4911]: E0606 10:31:01.432757 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" containerName="registry-server" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.432764 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" containerName="registry-server" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.433032 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d9c6908-f3ba-4ddf-ae00-24a9252b3f0e" containerName="registry-server" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.434045 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-fnzrn" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.564558 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l95w\" (UniqueName: \"kubernetes.io/projected/492f750e-09f5-4ec0-97ef-421cc0880ebf-kube-api-access-4l95w\") pod \"crc-debug-fnzrn\" (UID: \"492f750e-09f5-4ec0-97ef-421cc0880ebf\") " pod="openstack/crc-debug-fnzrn" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.564907 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/492f750e-09f5-4ec0-97ef-421cc0880ebf-host\") pod \"crc-debug-fnzrn\" (UID: \"492f750e-09f5-4ec0-97ef-421cc0880ebf\") " pod="openstack/crc-debug-fnzrn" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.668650 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/492f750e-09f5-4ec0-97ef-421cc0880ebf-host\") pod \"crc-debug-fnzrn\" (UID: \"492f750e-09f5-4ec0-97ef-421cc0880ebf\") " pod="openstack/crc-debug-fnzrn" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.668936 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/492f750e-09f5-4ec0-97ef-421cc0880ebf-host\") pod \"crc-debug-fnzrn\" (UID: \"492f750e-09f5-4ec0-97ef-421cc0880ebf\") " pod="openstack/crc-debug-fnzrn" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.669036 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l95w\" (UniqueName: \"kubernetes.io/projected/492f750e-09f5-4ec0-97ef-421cc0880ebf-kube-api-access-4l95w\") pod \"crc-debug-fnzrn\" (UID: \"492f750e-09f5-4ec0-97ef-421cc0880ebf\") " pod="openstack/crc-debug-fnzrn" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.700435 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l95w\" (UniqueName: \"kubernetes.io/projected/492f750e-09f5-4ec0-97ef-421cc0880ebf-kube-api-access-4l95w\") pod \"crc-debug-fnzrn\" (UID: \"492f750e-09f5-4ec0-97ef-421cc0880ebf\") " pod="openstack/crc-debug-fnzrn" Jun 06 10:31:01 crc kubenswrapper[4911]: I0606 10:31:01.759627 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-fnzrn" Jun 06 10:31:02 crc kubenswrapper[4911]: I0606 10:31:02.377814 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-fnzrn" event={"ID":"492f750e-09f5-4ec0-97ef-421cc0880ebf","Type":"ContainerStarted","Data":"52f702f1f391e284902d6d0c2ae41c0c72028aaeb594aef6edc1c1b0505efaaa"} Jun 06 10:31:03 crc kubenswrapper[4911]: I0606 10:31:03.393299 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-fnzrn" event={"ID":"492f750e-09f5-4ec0-97ef-421cc0880ebf","Type":"ContainerStarted","Data":"f966634638bfe8cecf540717a10c7d58abe6983d7571f4f3c39e0b63fa2f13c6"} Jun 06 10:31:03 crc kubenswrapper[4911]: I0606 10:31:03.422253 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-fnzrn" podStartSLOduration=2.422219906 podStartE2EDuration="2.422219906s" podCreationTimestamp="2025-06-06 10:31:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:31:03.413510572 +0000 UTC m=+4674.688936115" watchObservedRunningTime="2025-06-06 10:31:03.422219906 +0000 UTC m=+4674.697645449" Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.130839 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-fnzrn"] Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.132334 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-fnzrn" podUID="492f750e-09f5-4ec0-97ef-421cc0880ebf" containerName="container-00" containerID="cri-o://f966634638bfe8cecf540717a10c7d58abe6983d7571f4f3c39e0b63fa2f13c6" gracePeriod=2 Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.191695 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-fnzrn"] Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.514849 4911 generic.go:334] "Generic (PLEG): container finished" podID="492f750e-09f5-4ec0-97ef-421cc0880ebf" containerID="f966634638bfe8cecf540717a10c7d58abe6983d7571f4f3c39e0b63fa2f13c6" exitCode=0 Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.611369 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-fnzrn" Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.795608 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/492f750e-09f5-4ec0-97ef-421cc0880ebf-host\") pod \"492f750e-09f5-4ec0-97ef-421cc0880ebf\" (UID: \"492f750e-09f5-4ec0-97ef-421cc0880ebf\") " Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.795734 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/492f750e-09f5-4ec0-97ef-421cc0880ebf-host" (OuterVolumeSpecName: "host") pod "492f750e-09f5-4ec0-97ef-421cc0880ebf" (UID: "492f750e-09f5-4ec0-97ef-421cc0880ebf"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.795835 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4l95w\" (UniqueName: \"kubernetes.io/projected/492f750e-09f5-4ec0-97ef-421cc0880ebf-kube-api-access-4l95w\") pod \"492f750e-09f5-4ec0-97ef-421cc0880ebf\" (UID: \"492f750e-09f5-4ec0-97ef-421cc0880ebf\") " Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.796553 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/492f750e-09f5-4ec0-97ef-421cc0880ebf-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.811400 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/492f750e-09f5-4ec0-97ef-421cc0880ebf-kube-api-access-4l95w" (OuterVolumeSpecName: "kube-api-access-4l95w") pod "492f750e-09f5-4ec0-97ef-421cc0880ebf" (UID: "492f750e-09f5-4ec0-97ef-421cc0880ebf"). InnerVolumeSpecName "kube-api-access-4l95w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.899896 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4l95w\" (UniqueName: \"kubernetes.io/projected/492f750e-09f5-4ec0-97ef-421cc0880ebf-kube-api-access-4l95w\") on node \"crc\" DevicePath \"\"" Jun 06 10:31:13 crc kubenswrapper[4911]: I0606 10:31:13.962465 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="492f750e-09f5-4ec0-97ef-421cc0880ebf" path="/var/lib/kubelet/pods/492f750e-09f5-4ec0-97ef-421cc0880ebf/volumes" Jun 06 10:31:14 crc kubenswrapper[4911]: I0606 10:31:14.525834 4911 scope.go:117] "RemoveContainer" containerID="f966634638bfe8cecf540717a10c7d58abe6983d7571f4f3c39e0b63fa2f13c6" Jun 06 10:31:14 crc kubenswrapper[4911]: I0606 10:31:14.526170 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-fnzrn" Jun 06 10:32:01 crc kubenswrapper[4911]: I0606 10:32:01.485264 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-hxszt"] Jun 06 10:32:01 crc kubenswrapper[4911]: E0606 10:32:01.486797 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="492f750e-09f5-4ec0-97ef-421cc0880ebf" containerName="container-00" Jun 06 10:32:01 crc kubenswrapper[4911]: I0606 10:32:01.486820 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="492f750e-09f5-4ec0-97ef-421cc0880ebf" containerName="container-00" Jun 06 10:32:01 crc kubenswrapper[4911]: I0606 10:32:01.487044 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="492f750e-09f5-4ec0-97ef-421cc0880ebf" containerName="container-00" Jun 06 10:32:01 crc kubenswrapper[4911]: I0606 10:32:01.487785 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-hxszt" Jun 06 10:32:01 crc kubenswrapper[4911]: I0606 10:32:01.607362 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fns9s\" (UniqueName: \"kubernetes.io/projected/7c84040f-45ca-4d82-a3fc-d66f4304a176-kube-api-access-fns9s\") pod \"crc-debug-hxszt\" (UID: \"7c84040f-45ca-4d82-a3fc-d66f4304a176\") " pod="openstack/crc-debug-hxszt" Jun 06 10:32:01 crc kubenswrapper[4911]: I0606 10:32:01.607420 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7c84040f-45ca-4d82-a3fc-d66f4304a176-host\") pod \"crc-debug-hxszt\" (UID: \"7c84040f-45ca-4d82-a3fc-d66f4304a176\") " pod="openstack/crc-debug-hxszt" Jun 06 10:32:01 crc kubenswrapper[4911]: I0606 10:32:01.709615 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fns9s\" (UniqueName: \"kubernetes.io/projected/7c84040f-45ca-4d82-a3fc-d66f4304a176-kube-api-access-fns9s\") pod \"crc-debug-hxszt\" (UID: \"7c84040f-45ca-4d82-a3fc-d66f4304a176\") " pod="openstack/crc-debug-hxszt" Jun 06 10:32:01 crc kubenswrapper[4911]: I0606 10:32:01.709710 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7c84040f-45ca-4d82-a3fc-d66f4304a176-host\") pod \"crc-debug-hxszt\" (UID: \"7c84040f-45ca-4d82-a3fc-d66f4304a176\") " pod="openstack/crc-debug-hxszt" Jun 06 10:32:01 crc kubenswrapper[4911]: I0606 10:32:01.709961 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7c84040f-45ca-4d82-a3fc-d66f4304a176-host\") pod \"crc-debug-hxszt\" (UID: \"7c84040f-45ca-4d82-a3fc-d66f4304a176\") " pod="openstack/crc-debug-hxszt" Jun 06 10:32:01 crc kubenswrapper[4911]: I0606 10:32:01.734262 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fns9s\" (UniqueName: \"kubernetes.io/projected/7c84040f-45ca-4d82-a3fc-d66f4304a176-kube-api-access-fns9s\") pod \"crc-debug-hxszt\" (UID: \"7c84040f-45ca-4d82-a3fc-d66f4304a176\") " pod="openstack/crc-debug-hxszt" Jun 06 10:32:01 crc kubenswrapper[4911]: I0606 10:32:01.812071 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-hxszt" Jun 06 10:32:02 crc kubenswrapper[4911]: I0606 10:32:02.089550 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-hxszt" event={"ID":"7c84040f-45ca-4d82-a3fc-d66f4304a176","Type":"ContainerStarted","Data":"3d2981c76ec957e8b64e03a8e0a521196296086a285814d90aac26103ce98eb3"} Jun 06 10:32:03 crc kubenswrapper[4911]: I0606 10:32:03.103992 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-hxszt" event={"ID":"7c84040f-45ca-4d82-a3fc-d66f4304a176","Type":"ContainerStarted","Data":"f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311"} Jun 06 10:32:03 crc kubenswrapper[4911]: I0606 10:32:03.128647 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-hxszt" podStartSLOduration=2.128611879 podStartE2EDuration="2.128611879s" podCreationTimestamp="2025-06-06 10:32:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:32:03.12047387 +0000 UTC m=+4734.395899413" watchObservedRunningTime="2025-06-06 10:32:03.128611879 +0000 UTC m=+4734.404037442" Jun 06 10:32:12 crc kubenswrapper[4911]: I0606 10:32:12.510691 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-hxszt"] Jun 06 10:32:12 crc kubenswrapper[4911]: I0606 10:32:12.511663 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-hxszt" podUID="7c84040f-45ca-4d82-a3fc-d66f4304a176" containerName="container-00" containerID="cri-o://f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311" gracePeriod=2 Jun 06 10:32:12 crc kubenswrapper[4911]: I0606 10:32:12.521075 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-hxszt"] Jun 06 10:32:12 crc kubenswrapper[4911]: I0606 10:32:12.606720 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-hxszt" Jun 06 10:32:12 crc kubenswrapper[4911]: I0606 10:32:12.698585 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7c84040f-45ca-4d82-a3fc-d66f4304a176-host\") pod \"7c84040f-45ca-4d82-a3fc-d66f4304a176\" (UID: \"7c84040f-45ca-4d82-a3fc-d66f4304a176\") " Jun 06 10:32:12 crc kubenswrapper[4911]: I0606 10:32:12.698829 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7c84040f-45ca-4d82-a3fc-d66f4304a176-host" (OuterVolumeSpecName: "host") pod "7c84040f-45ca-4d82-a3fc-d66f4304a176" (UID: "7c84040f-45ca-4d82-a3fc-d66f4304a176"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:32:12 crc kubenswrapper[4911]: I0606 10:32:12.699014 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fns9s\" (UniqueName: \"kubernetes.io/projected/7c84040f-45ca-4d82-a3fc-d66f4304a176-kube-api-access-fns9s\") pod \"7c84040f-45ca-4d82-a3fc-d66f4304a176\" (UID: \"7c84040f-45ca-4d82-a3fc-d66f4304a176\") " Jun 06 10:32:12 crc kubenswrapper[4911]: I0606 10:32:12.699692 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7c84040f-45ca-4d82-a3fc-d66f4304a176-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:32:12 crc kubenswrapper[4911]: I0606 10:32:12.718463 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c84040f-45ca-4d82-a3fc-d66f4304a176-kube-api-access-fns9s" (OuterVolumeSpecName: "kube-api-access-fns9s") pod "7c84040f-45ca-4d82-a3fc-d66f4304a176" (UID: "7c84040f-45ca-4d82-a3fc-d66f4304a176"). InnerVolumeSpecName "kube-api-access-fns9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:32:12 crc kubenswrapper[4911]: I0606 10:32:12.801957 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fns9s\" (UniqueName: \"kubernetes.io/projected/7c84040f-45ca-4d82-a3fc-d66f4304a176-kube-api-access-fns9s\") on node \"crc\" DevicePath \"\"" Jun 06 10:32:13 crc kubenswrapper[4911]: I0606 10:32:13.210627 4911 generic.go:334] "Generic (PLEG): container finished" podID="7c84040f-45ca-4d82-a3fc-d66f4304a176" containerID="f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311" exitCode=0 Jun 06 10:32:13 crc kubenswrapper[4911]: I0606 10:32:13.211109 4911 scope.go:117] "RemoveContainer" containerID="f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311" Jun 06 10:32:13 crc kubenswrapper[4911]: I0606 10:32:13.211307 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-hxszt" Jun 06 10:32:13 crc kubenswrapper[4911]: I0606 10:32:13.254357 4911 scope.go:117] "RemoveContainer" containerID="f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311" Jun 06 10:32:13 crc kubenswrapper[4911]: E0606 10:32:13.259303 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311\": container with ID starting with f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311 not found: ID does not exist" containerID="f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311" Jun 06 10:32:13 crc kubenswrapper[4911]: I0606 10:32:13.259364 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311"} err="failed to get container status \"f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311\": rpc error: code = NotFound desc = could not find container \"f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311\": container with ID starting with f0cdc847560aaa55749edd2c12a11ff8c9f52b00f9a7f691f47da20a91d5c311 not found: ID does not exist" Jun 06 10:32:13 crc kubenswrapper[4911]: I0606 10:32:13.959306 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c84040f-45ca-4d82-a3fc-d66f4304a176" path="/var/lib/kubelet/pods/7c84040f-45ca-4d82-a3fc-d66f4304a176/volumes" Jun 06 10:32:22 crc kubenswrapper[4911]: I0606 10:32:22.896032 4911 scope.go:117] "RemoveContainer" containerID="ee761082244c084cb4e1846c3001f07602b8bf1ff2d63653bf2e1111a08683b1" Jun 06 10:32:24 crc kubenswrapper[4911]: I0606 10:32:24.299976 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:32:24 crc kubenswrapper[4911]: I0606 10:32:24.300717 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.311468 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4bbsh"] Jun 06 10:32:27 crc kubenswrapper[4911]: E0606 10:32:27.312584 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c84040f-45ca-4d82-a3fc-d66f4304a176" containerName="container-00" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.312607 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c84040f-45ca-4d82-a3fc-d66f4304a176" containerName="container-00" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.312858 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c84040f-45ca-4d82-a3fc-d66f4304a176" containerName="container-00" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.315113 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.345765 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bbsh"] Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.374997 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-utilities\") pod \"redhat-marketplace-4bbsh\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.375137 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-catalog-content\") pod \"redhat-marketplace-4bbsh\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.375181 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jbc8\" (UniqueName: \"kubernetes.io/projected/f65dca56-2ff8-4b47-83f2-55116cd21888-kube-api-access-5jbc8\") pod \"redhat-marketplace-4bbsh\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.477062 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-utilities\") pod \"redhat-marketplace-4bbsh\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.477546 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-catalog-content\") pod \"redhat-marketplace-4bbsh\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.477643 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jbc8\" (UniqueName: \"kubernetes.io/projected/f65dca56-2ff8-4b47-83f2-55116cd21888-kube-api-access-5jbc8\") pod \"redhat-marketplace-4bbsh\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.478591 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-utilities\") pod \"redhat-marketplace-4bbsh\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.483554 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-catalog-content\") pod \"redhat-marketplace-4bbsh\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.509975 4911 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-5jbc8\" (UniqueName: \"kubernetes.io/projected/f65dca56-2ff8-4b47-83f2-55116cd21888-kube-api-access-5jbc8\") pod \"redhat-marketplace-4bbsh\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:27 crc kubenswrapper[4911]: I0606 10:32:27.676825 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:28 crc kubenswrapper[4911]: I0606 10:32:28.392320 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bbsh"] Jun 06 10:32:28 crc kubenswrapper[4911]: W0606 10:32:28.400066 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf65dca56_2ff8_4b47_83f2_55116cd21888.slice/crio-10a3e69f8b1b17ec79f1c694b946077caf0f338134e24a99652034676787b160 WatchSource:0}: Error finding container 10a3e69f8b1b17ec79f1c694b946077caf0f338134e24a99652034676787b160: Status 404 returned error can't find the container with id 10a3e69f8b1b17ec79f1c694b946077caf0f338134e24a99652034676787b160 Jun 06 10:32:29 crc kubenswrapper[4911]: I0606 10:32:29.384244 4911 generic.go:334] "Generic (PLEG): container finished" podID="f65dca56-2ff8-4b47-83f2-55116cd21888" containerID="6045aa27ce0db1b59f5b41fa833bf4a8f45fa57765cbfeed5c553df6655d48ff" exitCode=0 Jun 06 10:32:29 crc kubenswrapper[4911]: I0606 10:32:29.384320 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bbsh" event={"ID":"f65dca56-2ff8-4b47-83f2-55116cd21888","Type":"ContainerDied","Data":"6045aa27ce0db1b59f5b41fa833bf4a8f45fa57765cbfeed5c553df6655d48ff"} Jun 06 10:32:29 crc kubenswrapper[4911]: I0606 10:32:29.384720 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bbsh" event={"ID":"f65dca56-2ff8-4b47-83f2-55116cd21888","Type":"ContainerStarted","Data":"10a3e69f8b1b17ec79f1c694b946077caf0f338134e24a99652034676787b160"} Jun 06 10:32:29 crc kubenswrapper[4911]: I0606 10:32:29.387621 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 10:32:30 crc kubenswrapper[4911]: I0606 10:32:30.403989 4911 generic.go:334] "Generic (PLEG): container finished" podID="f65dca56-2ff8-4b47-83f2-55116cd21888" containerID="74fd688852e3ed02866c034029b47ab3d60aba4e3b1224fd858078eb2cce12a2" exitCode=0 Jun 06 10:32:30 crc kubenswrapper[4911]: I0606 10:32:30.404077 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bbsh" event={"ID":"f65dca56-2ff8-4b47-83f2-55116cd21888","Type":"ContainerDied","Data":"74fd688852e3ed02866c034029b47ab3d60aba4e3b1224fd858078eb2cce12a2"} Jun 06 10:32:31 crc kubenswrapper[4911]: I0606 10:32:31.417118 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bbsh" event={"ID":"f65dca56-2ff8-4b47-83f2-55116cd21888","Type":"ContainerStarted","Data":"89708b448c3fd309b0827efd6b1b7432a42b93c5a35be4d3d8a980a77094284f"} Jun 06 10:32:31 crc kubenswrapper[4911]: I0606 10:32:31.455181 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4bbsh" podStartSLOduration=2.729554164 podStartE2EDuration="4.4551395s" podCreationTimestamp="2025-06-06 10:32:27 +0000 UTC" firstStartedPulling="2025-06-06 10:32:29.387302184 +0000 UTC m=+4760.662727727" 
lastFinishedPulling="2025-06-06 10:32:31.11288752 +0000 UTC m=+4762.388313063" observedRunningTime="2025-06-06 10:32:31.440267657 +0000 UTC m=+4762.715693210" watchObservedRunningTime="2025-06-06 10:32:31.4551395 +0000 UTC m=+4762.730565043" Jun 06 10:32:37 crc kubenswrapper[4911]: I0606 10:32:37.677919 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:37 crc kubenswrapper[4911]: I0606 10:32:37.678733 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:37 crc kubenswrapper[4911]: I0606 10:32:37.747623 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:38 crc kubenswrapper[4911]: I0606 10:32:38.572417 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:38 crc kubenswrapper[4911]: I0606 10:32:38.644435 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bbsh"] Jun 06 10:32:40 crc kubenswrapper[4911]: I0606 10:32:40.511051 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4bbsh" podUID="f65dca56-2ff8-4b47-83f2-55116cd21888" containerName="registry-server" containerID="cri-o://89708b448c3fd309b0827efd6b1b7432a42b93c5a35be4d3d8a980a77094284f" gracePeriod=2 Jun 06 10:32:41 crc kubenswrapper[4911]: I0606 10:32:41.524334 4911 generic.go:334] "Generic (PLEG): container finished" podID="f65dca56-2ff8-4b47-83f2-55116cd21888" containerID="89708b448c3fd309b0827efd6b1b7432a42b93c5a35be4d3d8a980a77094284f" exitCode=0 Jun 06 10:32:41 crc kubenswrapper[4911]: I0606 10:32:41.524401 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bbsh" event={"ID":"f65dca56-2ff8-4b47-83f2-55116cd21888","Type":"ContainerDied","Data":"89708b448c3fd309b0827efd6b1b7432a42b93c5a35be4d3d8a980a77094284f"} Jun 06 10:32:41 crc kubenswrapper[4911]: I0606 10:32:41.843818 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:41 crc kubenswrapper[4911]: I0606 10:32:41.929995 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-catalog-content\") pod \"f65dca56-2ff8-4b47-83f2-55116cd21888\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " Jun 06 10:32:41 crc kubenswrapper[4911]: I0606 10:32:41.930326 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-utilities\") pod \"f65dca56-2ff8-4b47-83f2-55116cd21888\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " Jun 06 10:32:41 crc kubenswrapper[4911]: I0606 10:32:41.930560 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jbc8\" (UniqueName: \"kubernetes.io/projected/f65dca56-2ff8-4b47-83f2-55116cd21888-kube-api-access-5jbc8\") pod \"f65dca56-2ff8-4b47-83f2-55116cd21888\" (UID: \"f65dca56-2ff8-4b47-83f2-55116cd21888\") " Jun 06 10:32:41 crc kubenswrapper[4911]: I0606 10:32:41.931629 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-utilities" (OuterVolumeSpecName: "utilities") pod "f65dca56-2ff8-4b47-83f2-55116cd21888" (UID: "f65dca56-2ff8-4b47-83f2-55116cd21888"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:32:41 crc kubenswrapper[4911]: I0606 10:32:41.939322 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f65dca56-2ff8-4b47-83f2-55116cd21888-kube-api-access-5jbc8" (OuterVolumeSpecName: "kube-api-access-5jbc8") pod "f65dca56-2ff8-4b47-83f2-55116cd21888" (UID: "f65dca56-2ff8-4b47-83f2-55116cd21888"). InnerVolumeSpecName "kube-api-access-5jbc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:32:41 crc kubenswrapper[4911]: I0606 10:32:41.943380 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f65dca56-2ff8-4b47-83f2-55116cd21888" (UID: "f65dca56-2ff8-4b47-83f2-55116cd21888"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:32:42 crc kubenswrapper[4911]: I0606 10:32:42.034305 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jbc8\" (UniqueName: \"kubernetes.io/projected/f65dca56-2ff8-4b47-83f2-55116cd21888-kube-api-access-5jbc8\") on node \"crc\" DevicePath \"\"" Jun 06 10:32:42 crc kubenswrapper[4911]: I0606 10:32:42.034634 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:32:42 crc kubenswrapper[4911]: I0606 10:32:42.034756 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f65dca56-2ff8-4b47-83f2-55116cd21888-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:32:42 crc kubenswrapper[4911]: I0606 10:32:42.539480 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bbsh" event={"ID":"f65dca56-2ff8-4b47-83f2-55116cd21888","Type":"ContainerDied","Data":"10a3e69f8b1b17ec79f1c694b946077caf0f338134e24a99652034676787b160"} Jun 06 10:32:42 crc kubenswrapper[4911]: I0606 10:32:42.539581 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bbsh" Jun 06 10:32:42 crc kubenswrapper[4911]: I0606 10:32:42.539897 4911 scope.go:117] "RemoveContainer" containerID="89708b448c3fd309b0827efd6b1b7432a42b93c5a35be4d3d8a980a77094284f" Jun 06 10:32:42 crc kubenswrapper[4911]: I0606 10:32:42.573189 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bbsh"] Jun 06 10:32:42 crc kubenswrapper[4911]: I0606 10:32:42.577066 4911 scope.go:117] "RemoveContainer" containerID="74fd688852e3ed02866c034029b47ab3d60aba4e3b1224fd858078eb2cce12a2" Jun 06 10:32:42 crc kubenswrapper[4911]: I0606 10:32:42.585674 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bbsh"] Jun 06 10:32:42 crc kubenswrapper[4911]: I0606 10:32:42.955459 4911 scope.go:117] "RemoveContainer" containerID="6045aa27ce0db1b59f5b41fa833bf4a8f45fa57765cbfeed5c553df6655d48ff" Jun 06 10:32:43 crc kubenswrapper[4911]: I0606 10:32:43.964349 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f65dca56-2ff8-4b47-83f2-55116cd21888" path="/var/lib/kubelet/pods/f65dca56-2ff8-4b47-83f2-55116cd21888/volumes" Jun 06 10:32:54 crc kubenswrapper[4911]: I0606 10:32:54.300580 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:32:54 crc kubenswrapper[4911]: I0606 10:32:54.301427 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:33:01 crc kubenswrapper[4911]: I0606 10:33:01.851060 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-8hs8m"] Jun 06 10:33:01 crc kubenswrapper[4911]: E0606 10:33:01.852448 4911 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f65dca56-2ff8-4b47-83f2-55116cd21888" containerName="extract-utilities" Jun 06 10:33:01 crc kubenswrapper[4911]: I0606 10:33:01.852470 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65dca56-2ff8-4b47-83f2-55116cd21888" containerName="extract-utilities" Jun 06 10:33:01 crc kubenswrapper[4911]: E0606 10:33:01.852484 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f65dca56-2ff8-4b47-83f2-55116cd21888" containerName="registry-server" Jun 06 10:33:01 crc kubenswrapper[4911]: I0606 10:33:01.852492 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65dca56-2ff8-4b47-83f2-55116cd21888" containerName="registry-server" Jun 06 10:33:01 crc kubenswrapper[4911]: E0606 10:33:01.852552 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f65dca56-2ff8-4b47-83f2-55116cd21888" containerName="extract-content" Jun 06 10:33:01 crc kubenswrapper[4911]: I0606 10:33:01.852560 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65dca56-2ff8-4b47-83f2-55116cd21888" containerName="extract-content" Jun 06 10:33:01 crc kubenswrapper[4911]: I0606 10:33:01.852829 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f65dca56-2ff8-4b47-83f2-55116cd21888" containerName="registry-server" Jun 06 10:33:01 crc kubenswrapper[4911]: I0606 10:33:01.853837 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-8hs8m" Jun 06 10:33:01 crc kubenswrapper[4911]: I0606 10:33:01.964701 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-host\") pod \"crc-debug-8hs8m\" (UID: \"73345df5-7db5-4eac-b5a7-2ab5f0ff0578\") " pod="openstack/crc-debug-8hs8m" Jun 06 10:33:01 crc kubenswrapper[4911]: I0606 10:33:01.965338 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvkk7\" (UniqueName: \"kubernetes.io/projected/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-kube-api-access-mvkk7\") pod \"crc-debug-8hs8m\" (UID: \"73345df5-7db5-4eac-b5a7-2ab5f0ff0578\") " pod="openstack/crc-debug-8hs8m" Jun 06 10:33:02 crc kubenswrapper[4911]: I0606 10:33:02.067963 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-host\") pod \"crc-debug-8hs8m\" (UID: \"73345df5-7db5-4eac-b5a7-2ab5f0ff0578\") " pod="openstack/crc-debug-8hs8m" Jun 06 10:33:02 crc kubenswrapper[4911]: I0606 10:33:02.068174 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvkk7\" (UniqueName: \"kubernetes.io/projected/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-kube-api-access-mvkk7\") pod \"crc-debug-8hs8m\" (UID: \"73345df5-7db5-4eac-b5a7-2ab5f0ff0578\") " pod="openstack/crc-debug-8hs8m" Jun 06 10:33:02 crc kubenswrapper[4911]: I0606 10:33:02.068184 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-host\") pod \"crc-debug-8hs8m\" (UID: \"73345df5-7db5-4eac-b5a7-2ab5f0ff0578\") " pod="openstack/crc-debug-8hs8m" Jun 06 10:33:02 crc kubenswrapper[4911]: I0606 10:33:02.091039 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvkk7\" (UniqueName: \"kubernetes.io/projected/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-kube-api-access-mvkk7\") pod \"crc-debug-8hs8m\" (UID: 
\"73345df5-7db5-4eac-b5a7-2ab5f0ff0578\") " pod="openstack/crc-debug-8hs8m" Jun 06 10:33:02 crc kubenswrapper[4911]: I0606 10:33:02.176238 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-8hs8m" Jun 06 10:33:02 crc kubenswrapper[4911]: W0606 10:33:02.211260 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73345df5_7db5_4eac_b5a7_2ab5f0ff0578.slice/crio-aa05a62362642f40910d82a4b6d9dc818ef2336a08571bf452ced1d41f3aa7a2 WatchSource:0}: Error finding container aa05a62362642f40910d82a4b6d9dc818ef2336a08571bf452ced1d41f3aa7a2: Status 404 returned error can't find the container with id aa05a62362642f40910d82a4b6d9dc818ef2336a08571bf452ced1d41f3aa7a2 Jun 06 10:33:02 crc kubenswrapper[4911]: I0606 10:33:02.745267 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-8hs8m" event={"ID":"73345df5-7db5-4eac-b5a7-2ab5f0ff0578","Type":"ContainerStarted","Data":"6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9"} Jun 06 10:33:02 crc kubenswrapper[4911]: I0606 10:33:02.745763 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-8hs8m" event={"ID":"73345df5-7db5-4eac-b5a7-2ab5f0ff0578","Type":"ContainerStarted","Data":"aa05a62362642f40910d82a4b6d9dc818ef2336a08571bf452ced1d41f3aa7a2"} Jun 06 10:33:02 crc kubenswrapper[4911]: I0606 10:33:02.776891 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-8hs8m" podStartSLOduration=1.7768506149999999 podStartE2EDuration="1.776850615s" podCreationTimestamp="2025-06-06 10:33:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:33:02.761389678 +0000 UTC m=+4794.036815271" watchObservedRunningTime="2025-06-06 10:33:02.776850615 +0000 UTC m=+4794.052276178" Jun 06 10:33:12 crc kubenswrapper[4911]: I0606 10:33:12.892150 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-8hs8m"] Jun 06 10:33:12 crc kubenswrapper[4911]: I0606 10:33:12.894009 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-8hs8m" podUID="73345df5-7db5-4eac-b5a7-2ab5f0ff0578" containerName="container-00" containerID="cri-o://6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9" gracePeriod=2 Jun 06 10:33:12 crc kubenswrapper[4911]: I0606 10:33:12.906224 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-8hs8m"] Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.046585 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-8hs8m" Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.140619 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvkk7\" (UniqueName: \"kubernetes.io/projected/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-kube-api-access-mvkk7\") pod \"73345df5-7db5-4eac-b5a7-2ab5f0ff0578\" (UID: \"73345df5-7db5-4eac-b5a7-2ab5f0ff0578\") " Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.140911 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-host\") pod \"73345df5-7db5-4eac-b5a7-2ab5f0ff0578\" (UID: \"73345df5-7db5-4eac-b5a7-2ab5f0ff0578\") " Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.141736 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-host" (OuterVolumeSpecName: "host") pod "73345df5-7db5-4eac-b5a7-2ab5f0ff0578" (UID: "73345df5-7db5-4eac-b5a7-2ab5f0ff0578"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.142317 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.150175 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-kube-api-access-mvkk7" (OuterVolumeSpecName: "kube-api-access-mvkk7") pod "73345df5-7db5-4eac-b5a7-2ab5f0ff0578" (UID: "73345df5-7db5-4eac-b5a7-2ab5f0ff0578"). InnerVolumeSpecName "kube-api-access-mvkk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.244576 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvkk7\" (UniqueName: \"kubernetes.io/projected/73345df5-7db5-4eac-b5a7-2ab5f0ff0578-kube-api-access-mvkk7\") on node \"crc\" DevicePath \"\"" Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.887426 4911 generic.go:334] "Generic (PLEG): container finished" podID="73345df5-7db5-4eac-b5a7-2ab5f0ff0578" containerID="6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9" exitCode=0 Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.887493 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-8hs8m" Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.887603 4911 scope.go:117] "RemoveContainer" containerID="6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9" Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.918539 4911 scope.go:117] "RemoveContainer" containerID="6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9" Jun 06 10:33:13 crc kubenswrapper[4911]: E0606 10:33:13.919317 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9\": container with ID starting with 6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9 not found: ID does not exist" containerID="6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9" Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.919403 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9"} err="failed to get container status \"6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9\": rpc error: code = NotFound desc = could not find container \"6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9\": container with ID starting with 6174d5d31e8502772d4b16609b98f160bf344f591e35369333b2442a40d19fd9 not found: ID does not exist" Jun 06 10:33:13 crc kubenswrapper[4911]: I0606 10:33:13.960458 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73345df5-7db5-4eac-b5a7-2ab5f0ff0578" path="/var/lib/kubelet/pods/73345df5-7db5-4eac-b5a7-2ab5f0ff0578/volumes" Jun 06 10:33:24 crc kubenswrapper[4911]: I0606 10:33:24.300504 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:33:24 crc kubenswrapper[4911]: I0606 10:33:24.301265 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:33:24 crc kubenswrapper[4911]: I0606 10:33:24.301329 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 10:33:24 crc kubenswrapper[4911]: I0606 10:33:24.302210 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8718fb62374e5accd6e02ad687ffd15b81dad7742fe81c95f29f54f27db70ac9"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 10:33:24 crc kubenswrapper[4911]: I0606 10:33:24.302268 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://8718fb62374e5accd6e02ad687ffd15b81dad7742fe81c95f29f54f27db70ac9" gracePeriod=600 Jun 06 10:33:25 crc kubenswrapper[4911]: I0606 
10:33:25.010899 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="8718fb62374e5accd6e02ad687ffd15b81dad7742fe81c95f29f54f27db70ac9" exitCode=0 Jun 06 10:33:25 crc kubenswrapper[4911]: I0606 10:33:25.010964 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"8718fb62374e5accd6e02ad687ffd15b81dad7742fe81c95f29f54f27db70ac9"} Jun 06 10:33:25 crc kubenswrapper[4911]: I0606 10:33:25.011768 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39"} Jun 06 10:33:25 crc kubenswrapper[4911]: I0606 10:33:25.011795 4911 scope.go:117] "RemoveContainer" containerID="6c56d2cfcc8db637402daf8ed67259357263d83cf478fc12bc2cb2b7c924b112" Jun 06 10:34:02 crc kubenswrapper[4911]: I0606 10:34:02.313563 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-h7fjf"] Jun 06 10:34:02 crc kubenswrapper[4911]: E0606 10:34:02.315351 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73345df5-7db5-4eac-b5a7-2ab5f0ff0578" containerName="container-00" Jun 06 10:34:02 crc kubenswrapper[4911]: I0606 10:34:02.315368 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="73345df5-7db5-4eac-b5a7-2ab5f0ff0578" containerName="container-00" Jun 06 10:34:02 crc kubenswrapper[4911]: I0606 10:34:02.315567 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="73345df5-7db5-4eac-b5a7-2ab5f0ff0578" containerName="container-00" Jun 06 10:34:02 crc kubenswrapper[4911]: I0606 10:34:02.316271 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-h7fjf" Jun 06 10:34:02 crc kubenswrapper[4911]: I0606 10:34:02.387609 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-host\") pod \"crc-debug-h7fjf\" (UID: \"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1\") " pod="openstack/crc-debug-h7fjf" Jun 06 10:34:02 crc kubenswrapper[4911]: I0606 10:34:02.387800 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkfxg\" (UniqueName: \"kubernetes.io/projected/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-kube-api-access-wkfxg\") pod \"crc-debug-h7fjf\" (UID: \"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1\") " pod="openstack/crc-debug-h7fjf" Jun 06 10:34:02 crc kubenswrapper[4911]: I0606 10:34:02.490755 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-host\") pod \"crc-debug-h7fjf\" (UID: \"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1\") " pod="openstack/crc-debug-h7fjf" Jun 06 10:34:02 crc kubenswrapper[4911]: I0606 10:34:02.490944 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-host\") pod \"crc-debug-h7fjf\" (UID: \"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1\") " pod="openstack/crc-debug-h7fjf" Jun 06 10:34:02 crc kubenswrapper[4911]: I0606 10:34:02.492649 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkfxg\" (UniqueName: \"kubernetes.io/projected/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-kube-api-access-wkfxg\") pod \"crc-debug-h7fjf\" (UID: \"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1\") " pod="openstack/crc-debug-h7fjf" Jun 06 10:34:02 crc kubenswrapper[4911]: I0606 10:34:02.517683 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkfxg\" (UniqueName: \"kubernetes.io/projected/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-kube-api-access-wkfxg\") pod \"crc-debug-h7fjf\" (UID: \"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1\") " pod="openstack/crc-debug-h7fjf" Jun 06 10:34:02 crc kubenswrapper[4911]: I0606 10:34:02.640216 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-h7fjf" Jun 06 10:34:03 crc kubenswrapper[4911]: I0606 10:34:03.421536 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-h7fjf" event={"ID":"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1","Type":"ContainerStarted","Data":"49cf173d04dc0553409e6291f04cb1d61c91a53ef44b60d4a898d6e1d92bec9a"} Jun 06 10:34:04 crc kubenswrapper[4911]: I0606 10:34:04.436262 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-h7fjf" event={"ID":"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1","Type":"ContainerStarted","Data":"f5665667c83f53df7915a2c224d6aa9fbc5415b2548080f42a8c84d094710761"} Jun 06 10:34:04 crc kubenswrapper[4911]: I0606 10:34:04.456778 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-h7fjf" podStartSLOduration=2.456753178 podStartE2EDuration="2.456753178s" podCreationTimestamp="2025-06-06 10:34:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:34:04.453854834 +0000 UTC m=+4855.729280397" watchObservedRunningTime="2025-06-06 10:34:04.456753178 +0000 UTC m=+4855.732178721" Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.322345 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-h7fjf"] Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.323398 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-h7fjf" podUID="5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1" containerName="container-00" containerID="cri-o://f5665667c83f53df7915a2c224d6aa9fbc5415b2548080f42a8c84d094710761" gracePeriod=2 Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.331986 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-h7fjf"] Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.539406 4911 generic.go:334] "Generic (PLEG): container finished" podID="5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1" containerID="f5665667c83f53df7915a2c224d6aa9fbc5415b2548080f42a8c84d094710761" exitCode=0 Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.539529 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49cf173d04dc0553409e6291f04cb1d61c91a53ef44b60d4a898d6e1d92bec9a" Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.645759 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-h7fjf" Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.714985 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkfxg\" (UniqueName: \"kubernetes.io/projected/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-kube-api-access-wkfxg\") pod \"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1\" (UID: \"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1\") " Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.715563 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-host\") pod \"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1\" (UID: \"5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1\") " Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.715729 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-host" (OuterVolumeSpecName: "host") pod "5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1" (UID: "5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1"). 
InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.716786 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.721706 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-kube-api-access-wkfxg" (OuterVolumeSpecName: "kube-api-access-wkfxg") pod "5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1" (UID: "5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1"). InnerVolumeSpecName "kube-api-access-wkfxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:34:14 crc kubenswrapper[4911]: I0606 10:34:14.818576 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkfxg\" (UniqueName: \"kubernetes.io/projected/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1-kube-api-access-wkfxg\") on node \"crc\" DevicePath \"\"" Jun 06 10:34:15 crc kubenswrapper[4911]: I0606 10:34:15.550754 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-h7fjf" Jun 06 10:34:15 crc kubenswrapper[4911]: I0606 10:34:15.970478 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1" path="/var/lib/kubelet/pods/5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1/volumes" Jun 06 10:34:23 crc kubenswrapper[4911]: I0606 10:34:23.047860 4911 scope.go:117] "RemoveContainer" containerID="1ef1db91324a7e25dc3fb1faf52a69ed9f39042a18694a7ca39d4f4070a4efd1" Jun 06 10:34:23 crc kubenswrapper[4911]: I0606 10:34:23.082230 4911 scope.go:117] "RemoveContainer" containerID="8c05a412068d96b84a76eb50a13a88f662c2913f4b7f28eab80095222f84986b" Jun 06 10:34:23 crc kubenswrapper[4911]: I0606 10:34:23.105971 4911 scope.go:117] "RemoveContainer" containerID="69330685461e9d290945911d406cae1f35a680cd5660edf7849722f002607818" Jun 06 10:35:01 crc kubenswrapper[4911]: I0606 10:35:01.686135 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-bskng"] Jun 06 10:35:01 crc kubenswrapper[4911]: E0606 10:35:01.688051 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1" containerName="container-00" Jun 06 10:35:01 crc kubenswrapper[4911]: I0606 10:35:01.688285 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1" containerName="container-00" Jun 06 10:35:01 crc kubenswrapper[4911]: I0606 10:35:01.688816 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f3e3276-62fb-4c6d-9c74-a6fbebde5ee1" containerName="container-00" Jun 06 10:35:01 crc kubenswrapper[4911]: I0606 10:35:01.690735 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-bskng" Jun 06 10:35:01 crc kubenswrapper[4911]: I0606 10:35:01.805117 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdqmf\" (UniqueName: \"kubernetes.io/projected/f73024fe-34f7-403e-a98c-3fa3d92313af-kube-api-access-mdqmf\") pod \"crc-debug-bskng\" (UID: \"f73024fe-34f7-403e-a98c-3fa3d92313af\") " pod="openstack/crc-debug-bskng" Jun 06 10:35:01 crc kubenswrapper[4911]: I0606 10:35:01.805548 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f73024fe-34f7-403e-a98c-3fa3d92313af-host\") pod \"crc-debug-bskng\" (UID: \"f73024fe-34f7-403e-a98c-3fa3d92313af\") " pod="openstack/crc-debug-bskng" Jun 06 10:35:01 crc kubenswrapper[4911]: I0606 10:35:01.907652 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdqmf\" (UniqueName: \"kubernetes.io/projected/f73024fe-34f7-403e-a98c-3fa3d92313af-kube-api-access-mdqmf\") pod \"crc-debug-bskng\" (UID: \"f73024fe-34f7-403e-a98c-3fa3d92313af\") " pod="openstack/crc-debug-bskng" Jun 06 10:35:01 crc kubenswrapper[4911]: I0606 10:35:01.907770 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f73024fe-34f7-403e-a98c-3fa3d92313af-host\") pod \"crc-debug-bskng\" (UID: \"f73024fe-34f7-403e-a98c-3fa3d92313af\") " pod="openstack/crc-debug-bskng" Jun 06 10:35:01 crc kubenswrapper[4911]: I0606 10:35:01.907924 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f73024fe-34f7-403e-a98c-3fa3d92313af-host\") pod \"crc-debug-bskng\" (UID: \"f73024fe-34f7-403e-a98c-3fa3d92313af\") " pod="openstack/crc-debug-bskng" Jun 06 10:35:01 crc kubenswrapper[4911]: I0606 10:35:01.930433 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdqmf\" (UniqueName: \"kubernetes.io/projected/f73024fe-34f7-403e-a98c-3fa3d92313af-kube-api-access-mdqmf\") pod \"crc-debug-bskng\" (UID: \"f73024fe-34f7-403e-a98c-3fa3d92313af\") " pod="openstack/crc-debug-bskng" Jun 06 10:35:02 crc kubenswrapper[4911]: I0606 10:35:02.011671 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-bskng" Jun 06 10:35:02 crc kubenswrapper[4911]: I0606 10:35:02.118949 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-bskng" event={"ID":"f73024fe-34f7-403e-a98c-3fa3d92313af","Type":"ContainerStarted","Data":"5008caa8c933c0135308efb17722739f2cfbd95a438abd2d7afbaca0b2236f4a"} Jun 06 10:35:03 crc kubenswrapper[4911]: I0606 10:35:03.143683 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-bskng" event={"ID":"f73024fe-34f7-403e-a98c-3fa3d92313af","Type":"ContainerStarted","Data":"e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c"} Jun 06 10:35:12 crc kubenswrapper[4911]: I0606 10:35:12.795890 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-bskng" podStartSLOduration=11.795865751000001 podStartE2EDuration="11.795865751s" podCreationTimestamp="2025-06-06 10:35:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:35:03.159775311 +0000 UTC m=+4914.435200864" watchObservedRunningTime="2025-06-06 10:35:12.795865751 +0000 UTC m=+4924.071291294" Jun 06 10:35:12 crc kubenswrapper[4911]: I0606 10:35:12.808962 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-bskng"] Jun 06 10:35:12 crc kubenswrapper[4911]: I0606 10:35:12.809496 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-bskng" podUID="f73024fe-34f7-403e-a98c-3fa3d92313af" containerName="container-00" containerID="cri-o://e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c" gracePeriod=2 Jun 06 10:35:12 crc kubenswrapper[4911]: I0606 10:35:12.824644 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-bskng"] Jun 06 10:35:12 crc kubenswrapper[4911]: I0606 10:35:12.926534 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-bskng" Jun 06 10:35:12 crc kubenswrapper[4911]: I0606 10:35:12.987399 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdqmf\" (UniqueName: \"kubernetes.io/projected/f73024fe-34f7-403e-a98c-3fa3d92313af-kube-api-access-mdqmf\") pod \"f73024fe-34f7-403e-a98c-3fa3d92313af\" (UID: \"f73024fe-34f7-403e-a98c-3fa3d92313af\") " Jun 06 10:35:12 crc kubenswrapper[4911]: I0606 10:35:12.988661 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f73024fe-34f7-403e-a98c-3fa3d92313af-host\") pod \"f73024fe-34f7-403e-a98c-3fa3d92313af\" (UID: \"f73024fe-34f7-403e-a98c-3fa3d92313af\") " Jun 06 10:35:12 crc kubenswrapper[4911]: I0606 10:35:12.988832 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f73024fe-34f7-403e-a98c-3fa3d92313af-host" (OuterVolumeSpecName: "host") pod "f73024fe-34f7-403e-a98c-3fa3d92313af" (UID: "f73024fe-34f7-403e-a98c-3fa3d92313af"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:35:12 crc kubenswrapper[4911]: I0606 10:35:12.990125 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f73024fe-34f7-403e-a98c-3fa3d92313af-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:35:12 crc kubenswrapper[4911]: I0606 10:35:12.995930 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f73024fe-34f7-403e-a98c-3fa3d92313af-kube-api-access-mdqmf" (OuterVolumeSpecName: "kube-api-access-mdqmf") pod "f73024fe-34f7-403e-a98c-3fa3d92313af" (UID: "f73024fe-34f7-403e-a98c-3fa3d92313af"). InnerVolumeSpecName "kube-api-access-mdqmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:35:13 crc kubenswrapper[4911]: I0606 10:35:13.093667 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdqmf\" (UniqueName: \"kubernetes.io/projected/f73024fe-34f7-403e-a98c-3fa3d92313af-kube-api-access-mdqmf\") on node \"crc\" DevicePath \"\"" Jun 06 10:35:13 crc kubenswrapper[4911]: I0606 10:35:13.244622 4911 generic.go:334] "Generic (PLEG): container finished" podID="f73024fe-34f7-403e-a98c-3fa3d92313af" containerID="e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c" exitCode=0 Jun 06 10:35:13 crc kubenswrapper[4911]: I0606 10:35:13.244706 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-bskng" Jun 06 10:35:13 crc kubenswrapper[4911]: I0606 10:35:13.244878 4911 scope.go:117] "RemoveContainer" containerID="e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c" Jun 06 10:35:13 crc kubenswrapper[4911]: I0606 10:35:13.272221 4911 scope.go:117] "RemoveContainer" containerID="e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c" Jun 06 10:35:13 crc kubenswrapper[4911]: E0606 10:35:13.276310 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c\": container with ID starting with e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c not found: ID does not exist" containerID="e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c" Jun 06 10:35:13 crc kubenswrapper[4911]: I0606 10:35:13.276387 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c"} err="failed to get container status \"e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c\": rpc error: code = NotFound desc = could not find container \"e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c\": container with ID starting with e25716ed28222ec8974571bb63461f3724df459a32d0e8c5484894176d6f875c not found: ID does not exist" Jun 06 10:35:13 crc kubenswrapper[4911]: I0606 10:35:13.962864 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f73024fe-34f7-403e-a98c-3fa3d92313af" path="/var/lib/kubelet/pods/f73024fe-34f7-403e-a98c-3fa3d92313af/volumes" Jun 06 10:35:24 crc kubenswrapper[4911]: I0606 10:35:24.300584 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:35:24 crc kubenswrapper[4911]: I0606 
10:35:24.301151 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:35:54 crc kubenswrapper[4911]: I0606 10:35:54.300661 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:35:54 crc kubenswrapper[4911]: I0606 10:35:54.301244 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.141287 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-2nrcg"] Jun 06 10:36:02 crc kubenswrapper[4911]: E0606 10:36:02.142387 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f73024fe-34f7-403e-a98c-3fa3d92313af" containerName="container-00" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.142404 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f73024fe-34f7-403e-a98c-3fa3d92313af" containerName="container-00" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.142659 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f73024fe-34f7-403e-a98c-3fa3d92313af" containerName="container-00" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.143467 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2nrcg" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.259783 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5txh\" (UniqueName: \"kubernetes.io/projected/c815e63b-2d76-43ac-9a1b-eeb5b3402407-kube-api-access-h5txh\") pod \"crc-debug-2nrcg\" (UID: \"c815e63b-2d76-43ac-9a1b-eeb5b3402407\") " pod="openstack/crc-debug-2nrcg" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.259965 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c815e63b-2d76-43ac-9a1b-eeb5b3402407-host\") pod \"crc-debug-2nrcg\" (UID: \"c815e63b-2d76-43ac-9a1b-eeb5b3402407\") " pod="openstack/crc-debug-2nrcg" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.362718 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5txh\" (UniqueName: \"kubernetes.io/projected/c815e63b-2d76-43ac-9a1b-eeb5b3402407-kube-api-access-h5txh\") pod \"crc-debug-2nrcg\" (UID: \"c815e63b-2d76-43ac-9a1b-eeb5b3402407\") " pod="openstack/crc-debug-2nrcg" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.362797 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c815e63b-2d76-43ac-9a1b-eeb5b3402407-host\") pod \"crc-debug-2nrcg\" (UID: \"c815e63b-2d76-43ac-9a1b-eeb5b3402407\") " pod="openstack/crc-debug-2nrcg" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.362943 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c815e63b-2d76-43ac-9a1b-eeb5b3402407-host\") pod \"crc-debug-2nrcg\" (UID: \"c815e63b-2d76-43ac-9a1b-eeb5b3402407\") " pod="openstack/crc-debug-2nrcg" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.391860 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5txh\" (UniqueName: \"kubernetes.io/projected/c815e63b-2d76-43ac-9a1b-eeb5b3402407-kube-api-access-h5txh\") pod \"crc-debug-2nrcg\" (UID: \"c815e63b-2d76-43ac-9a1b-eeb5b3402407\") " pod="openstack/crc-debug-2nrcg" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.469126 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2nrcg" Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.959675 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-2nrcg" event={"ID":"c815e63b-2d76-43ac-9a1b-eeb5b3402407","Type":"ContainerStarted","Data":"795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963"} Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.960042 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-2nrcg" event={"ID":"c815e63b-2d76-43ac-9a1b-eeb5b3402407","Type":"ContainerStarted","Data":"1015815e8dc48224d14942627051de0445c1ba7035a4fc93a5b5b9427b1d6c5f"} Jun 06 10:36:02 crc kubenswrapper[4911]: I0606 10:36:02.977925 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-2nrcg" podStartSLOduration=0.977896031 podStartE2EDuration="977.896031ms" podCreationTimestamp="2025-06-06 10:36:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:36:02.974994786 +0000 UTC m=+4974.250420329" watchObservedRunningTime="2025-06-06 10:36:02.977896031 +0000 UTC m=+4974.253321574" Jun 06 10:36:04 crc kubenswrapper[4911]: I0606 10:36:04.798650 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-59f44bc869-n7dl7" podUID="72e5a926-1c68-4e9b-9240-44c27d488e36" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Jun 06 10:36:13 crc kubenswrapper[4911]: I0606 10:36:13.174012 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-2nrcg"] Jun 06 10:36:13 crc kubenswrapper[4911]: I0606 10:36:13.175233 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-2nrcg" podUID="c815e63b-2d76-43ac-9a1b-eeb5b3402407" containerName="container-00" containerID="cri-o://795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963" gracePeriod=2 Jun 06 10:36:13 crc kubenswrapper[4911]: I0606 10:36:13.183423 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-2nrcg"] Jun 06 10:36:13 crc kubenswrapper[4911]: I0606 10:36:13.288108 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-2nrcg" Jun 06 10:36:13 crc kubenswrapper[4911]: I0606 10:36:13.361771 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c815e63b-2d76-43ac-9a1b-eeb5b3402407-host\") pod \"c815e63b-2d76-43ac-9a1b-eeb5b3402407\" (UID: \"c815e63b-2d76-43ac-9a1b-eeb5b3402407\") " Jun 06 10:36:13 crc kubenswrapper[4911]: I0606 10:36:13.361952 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c815e63b-2d76-43ac-9a1b-eeb5b3402407-host" (OuterVolumeSpecName: "host") pod "c815e63b-2d76-43ac-9a1b-eeb5b3402407" (UID: "c815e63b-2d76-43ac-9a1b-eeb5b3402407"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:36:13 crc kubenswrapper[4911]: I0606 10:36:13.362458 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5txh\" (UniqueName: \"kubernetes.io/projected/c815e63b-2d76-43ac-9a1b-eeb5b3402407-kube-api-access-h5txh\") pod \"c815e63b-2d76-43ac-9a1b-eeb5b3402407\" (UID: \"c815e63b-2d76-43ac-9a1b-eeb5b3402407\") " Jun 06 10:36:13 crc kubenswrapper[4911]: I0606 10:36:13.363791 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c815e63b-2d76-43ac-9a1b-eeb5b3402407-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:36:13 crc kubenswrapper[4911]: I0606 10:36:13.370164 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c815e63b-2d76-43ac-9a1b-eeb5b3402407-kube-api-access-h5txh" (OuterVolumeSpecName: "kube-api-access-h5txh") pod "c815e63b-2d76-43ac-9a1b-eeb5b3402407" (UID: "c815e63b-2d76-43ac-9a1b-eeb5b3402407"). InnerVolumeSpecName "kube-api-access-h5txh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:36:13 crc kubenswrapper[4911]: I0606 10:36:13.466002 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5txh\" (UniqueName: \"kubernetes.io/projected/c815e63b-2d76-43ac-9a1b-eeb5b3402407-kube-api-access-h5txh\") on node \"crc\" DevicePath \"\"" Jun 06 10:36:13 crc kubenswrapper[4911]: I0606 10:36:13.962938 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c815e63b-2d76-43ac-9a1b-eeb5b3402407" path="/var/lib/kubelet/pods/c815e63b-2d76-43ac-9a1b-eeb5b3402407/volumes" Jun 06 10:36:14 crc kubenswrapper[4911]: I0606 10:36:14.077586 4911 generic.go:334] "Generic (PLEG): container finished" podID="c815e63b-2d76-43ac-9a1b-eeb5b3402407" containerID="795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963" exitCode=0 Jun 06 10:36:14 crc kubenswrapper[4911]: I0606 10:36:14.077646 4911 scope.go:117] "RemoveContainer" containerID="795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963" Jun 06 10:36:14 crc kubenswrapper[4911]: I0606 10:36:14.077656 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2nrcg" Jun 06 10:36:14 crc kubenswrapper[4911]: I0606 10:36:14.130294 4911 scope.go:117] "RemoveContainer" containerID="795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963" Jun 06 10:36:14 crc kubenswrapper[4911]: E0606 10:36:14.130956 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963\": container with ID starting with 795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963 not found: ID does not exist" containerID="795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963" Jun 06 10:36:14 crc kubenswrapper[4911]: I0606 10:36:14.131041 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963"} err="failed to get container status \"795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963\": rpc error: code = NotFound desc = could not find container \"795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963\": container with ID starting with 795d9132c8a8061c54efb47c0f29abd18da16ea9f412726af6cc02dc44eaf963 not found: ID does not exist" Jun 06 10:36:24 crc kubenswrapper[4911]: I0606 10:36:24.300437 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:36:24 crc kubenswrapper[4911]: I0606 10:36:24.301393 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:36:24 crc kubenswrapper[4911]: I0606 10:36:24.301509 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 10:36:24 crc kubenswrapper[4911]: I0606 10:36:24.303311 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 10:36:24 crc kubenswrapper[4911]: I0606 10:36:24.303390 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" gracePeriod=600 Jun 06 10:36:24 crc kubenswrapper[4911]: E0606 10:36:24.431187 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" 
podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:36:25 crc kubenswrapper[4911]: I0606 10:36:25.199455 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" exitCode=0 Jun 06 10:36:25 crc kubenswrapper[4911]: I0606 10:36:25.199713 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39"} Jun 06 10:36:25 crc kubenswrapper[4911]: I0606 10:36:25.199852 4911 scope.go:117] "RemoveContainer" containerID="8718fb62374e5accd6e02ad687ffd15b81dad7742fe81c95f29f54f27db70ac9" Jun 06 10:36:25 crc kubenswrapper[4911]: I0606 10:36:25.201162 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:36:25 crc kubenswrapper[4911]: E0606 10:36:25.201545 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.336845 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tvltc"] Jun 06 10:36:33 crc kubenswrapper[4911]: E0606 10:36:33.338268 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c815e63b-2d76-43ac-9a1b-eeb5b3402407" containerName="container-00" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.338285 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c815e63b-2d76-43ac-9a1b-eeb5b3402407" containerName="container-00" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.338582 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c815e63b-2d76-43ac-9a1b-eeb5b3402407" containerName="container-00" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.340658 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.349190 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tvltc"] Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.483139 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-catalog-content\") pod \"community-operators-tvltc\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.484354 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zltf\" (UniqueName: \"kubernetes.io/projected/b86cf713-6a9b-423b-8d95-7866463185de-kube-api-access-8zltf\") pod \"community-operators-tvltc\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.484549 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-utilities\") pod \"community-operators-tvltc\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.587390 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-catalog-content\") pod \"community-operators-tvltc\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.587515 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zltf\" (UniqueName: \"kubernetes.io/projected/b86cf713-6a9b-423b-8d95-7866463185de-kube-api-access-8zltf\") pod \"community-operators-tvltc\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.587573 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-utilities\") pod \"community-operators-tvltc\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.588130 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-catalog-content\") pod \"community-operators-tvltc\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.588156 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-utilities\") pod \"community-operators-tvltc\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.613288 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8zltf\" (UniqueName: \"kubernetes.io/projected/b86cf713-6a9b-423b-8d95-7866463185de-kube-api-access-8zltf\") pod \"community-operators-tvltc\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:33 crc kubenswrapper[4911]: I0606 10:36:33.667143 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:34 crc kubenswrapper[4911]: I0606 10:36:34.455428 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tvltc"] Jun 06 10:36:35 crc kubenswrapper[4911]: I0606 10:36:35.322653 4911 generic.go:334] "Generic (PLEG): container finished" podID="b86cf713-6a9b-423b-8d95-7866463185de" containerID="572881305a0aa38987ad31781849297fb850cada0cdbe705bc1b4833b7ece1fd" exitCode=0 Jun 06 10:36:35 crc kubenswrapper[4911]: I0606 10:36:35.322724 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tvltc" event={"ID":"b86cf713-6a9b-423b-8d95-7866463185de","Type":"ContainerDied","Data":"572881305a0aa38987ad31781849297fb850cada0cdbe705bc1b4833b7ece1fd"} Jun 06 10:36:35 crc kubenswrapper[4911]: I0606 10:36:35.323263 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tvltc" event={"ID":"b86cf713-6a9b-423b-8d95-7866463185de","Type":"ContainerStarted","Data":"5d2d06f8b6e7e1ae3af5bc26282f7cb6bca133fd3252c91ec1f5ae1251c2f44d"} Jun 06 10:36:37 crc kubenswrapper[4911]: I0606 10:36:37.949472 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:36:37 crc kubenswrapper[4911]: E0606 10:36:37.950256 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:36:40 crc kubenswrapper[4911]: I0606 10:36:40.376381 4911 generic.go:334] "Generic (PLEG): container finished" podID="b86cf713-6a9b-423b-8d95-7866463185de" containerID="bc88cf040b5560b71083a6ee4230abd7769b29ffb20eaccfbbac11a589915311" exitCode=0 Jun 06 10:36:40 crc kubenswrapper[4911]: I0606 10:36:40.377010 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tvltc" event={"ID":"b86cf713-6a9b-423b-8d95-7866463185de","Type":"ContainerDied","Data":"bc88cf040b5560b71083a6ee4230abd7769b29ffb20eaccfbbac11a589915311"} Jun 06 10:36:42 crc kubenswrapper[4911]: I0606 10:36:42.403208 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tvltc" event={"ID":"b86cf713-6a9b-423b-8d95-7866463185de","Type":"ContainerStarted","Data":"cb655eeebc58395150f18d031d7af47449b95e82524c0afc97cfb2bdd4b172bf"} Jun 06 10:36:42 crc kubenswrapper[4911]: I0606 10:36:42.423023 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tvltc" podStartSLOduration=4.076510865 podStartE2EDuration="9.422998435s" podCreationTimestamp="2025-06-06 10:36:33 +0000 UTC" firstStartedPulling="2025-06-06 10:36:36.347558729 +0000 UTC m=+5007.622984302" 
lastFinishedPulling="2025-06-06 10:36:41.694046329 +0000 UTC m=+5012.969471872" observedRunningTime="2025-06-06 10:36:42.42164256 +0000 UTC m=+5013.697068133" watchObservedRunningTime="2025-06-06 10:36:42.422998435 +0000 UTC m=+5013.698423988" Jun 06 10:36:43 crc kubenswrapper[4911]: I0606 10:36:43.668257 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:43 crc kubenswrapper[4911]: I0606 10:36:43.668824 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:43 crc kubenswrapper[4911]: I0606 10:36:43.721677 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:50 crc kubenswrapper[4911]: I0606 10:36:50.948893 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:36:50 crc kubenswrapper[4911]: E0606 10:36:50.949962 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:36:53 crc kubenswrapper[4911]: I0606 10:36:53.722370 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:53 crc kubenswrapper[4911]: I0606 10:36:53.777876 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tvltc"] Jun 06 10:36:54 crc kubenswrapper[4911]: I0606 10:36:54.529566 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tvltc" podUID="b86cf713-6a9b-423b-8d95-7866463185de" containerName="registry-server" containerID="cri-o://cb655eeebc58395150f18d031d7af47449b95e82524c0afc97cfb2bdd4b172bf" gracePeriod=2 Jun 06 10:36:55 crc kubenswrapper[4911]: I0606 10:36:55.541983 4911 generic.go:334] "Generic (PLEG): container finished" podID="b86cf713-6a9b-423b-8d95-7866463185de" containerID="cb655eeebc58395150f18d031d7af47449b95e82524c0afc97cfb2bdd4b172bf" exitCode=0 Jun 06 10:36:55 crc kubenswrapper[4911]: I0606 10:36:55.542062 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tvltc" event={"ID":"b86cf713-6a9b-423b-8d95-7866463185de","Type":"ContainerDied","Data":"cb655eeebc58395150f18d031d7af47449b95e82524c0afc97cfb2bdd4b172bf"} Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.034483 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.164002 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zltf\" (UniqueName: \"kubernetes.io/projected/b86cf713-6a9b-423b-8d95-7866463185de-kube-api-access-8zltf\") pod \"b86cf713-6a9b-423b-8d95-7866463185de\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.164216 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-catalog-content\") pod \"b86cf713-6a9b-423b-8d95-7866463185de\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.164297 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-utilities\") pod \"b86cf713-6a9b-423b-8d95-7866463185de\" (UID: \"b86cf713-6a9b-423b-8d95-7866463185de\") " Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.165319 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-utilities" (OuterVolumeSpecName: "utilities") pod "b86cf713-6a9b-423b-8d95-7866463185de" (UID: "b86cf713-6a9b-423b-8d95-7866463185de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.172341 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b86cf713-6a9b-423b-8d95-7866463185de-kube-api-access-8zltf" (OuterVolumeSpecName: "kube-api-access-8zltf") pod "b86cf713-6a9b-423b-8d95-7866463185de" (UID: "b86cf713-6a9b-423b-8d95-7866463185de"). InnerVolumeSpecName "kube-api-access-8zltf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.212656 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b86cf713-6a9b-423b-8d95-7866463185de" (UID: "b86cf713-6a9b-423b-8d95-7866463185de"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.267154 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zltf\" (UniqueName: \"kubernetes.io/projected/b86cf713-6a9b-423b-8d95-7866463185de-kube-api-access-8zltf\") on node \"crc\" DevicePath \"\"" Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.267218 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.267229 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b86cf713-6a9b-423b-8d95-7866463185de-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.559846 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tvltc" event={"ID":"b86cf713-6a9b-423b-8d95-7866463185de","Type":"ContainerDied","Data":"5d2d06f8b6e7e1ae3af5bc26282f7cb6bca133fd3252c91ec1f5ae1251c2f44d"} Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.561178 4911 scope.go:117] "RemoveContainer" containerID="cb655eeebc58395150f18d031d7af47449b95e82524c0afc97cfb2bdd4b172bf" Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.559976 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tvltc" Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.590367 4911 scope.go:117] "RemoveContainer" containerID="bc88cf040b5560b71083a6ee4230abd7769b29ffb20eaccfbbac11a589915311" Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.612271 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tvltc"] Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.622038 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tvltc"] Jun 06 10:36:56 crc kubenswrapper[4911]: I0606 10:36:56.634243 4911 scope.go:117] "RemoveContainer" containerID="572881305a0aa38987ad31781849297fb850cada0cdbe705bc1b4833b7ece1fd" Jun 06 10:36:57 crc kubenswrapper[4911]: I0606 10:36:57.976823 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b86cf713-6a9b-423b-8d95-7866463185de" path="/var/lib/kubelet/pods/b86cf713-6a9b-423b-8d95-7866463185de/volumes" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.546363 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-mztb4"] Jun 06 10:37:01 crc kubenswrapper[4911]: E0606 10:37:01.547669 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b86cf713-6a9b-423b-8d95-7866463185de" containerName="extract-content" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.547688 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b86cf713-6a9b-423b-8d95-7866463185de" containerName="extract-content" Jun 06 10:37:01 crc kubenswrapper[4911]: E0606 10:37:01.547724 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b86cf713-6a9b-423b-8d95-7866463185de" containerName="registry-server" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.547731 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b86cf713-6a9b-423b-8d95-7866463185de" containerName="registry-server" Jun 06 10:37:01 crc kubenswrapper[4911]: E0606 10:37:01.547745 4911 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="b86cf713-6a9b-423b-8d95-7866463185de" containerName="extract-utilities" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.547752 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b86cf713-6a9b-423b-8d95-7866463185de" containerName="extract-utilities" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.547952 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b86cf713-6a9b-423b-8d95-7866463185de" containerName="registry-server" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.548699 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-mztb4" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.599182 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7f84610f-c0ad-43af-8838-39aca13d2edb-host\") pod \"crc-debug-mztb4\" (UID: \"7f84610f-c0ad-43af-8838-39aca13d2edb\") " pod="openstack/crc-debug-mztb4" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.599335 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w87j5\" (UniqueName: \"kubernetes.io/projected/7f84610f-c0ad-43af-8838-39aca13d2edb-kube-api-access-w87j5\") pod \"crc-debug-mztb4\" (UID: \"7f84610f-c0ad-43af-8838-39aca13d2edb\") " pod="openstack/crc-debug-mztb4" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.702386 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w87j5\" (UniqueName: \"kubernetes.io/projected/7f84610f-c0ad-43af-8838-39aca13d2edb-kube-api-access-w87j5\") pod \"crc-debug-mztb4\" (UID: \"7f84610f-c0ad-43af-8838-39aca13d2edb\") " pod="openstack/crc-debug-mztb4" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.702607 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7f84610f-c0ad-43af-8838-39aca13d2edb-host\") pod \"crc-debug-mztb4\" (UID: \"7f84610f-c0ad-43af-8838-39aca13d2edb\") " pod="openstack/crc-debug-mztb4" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.702765 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7f84610f-c0ad-43af-8838-39aca13d2edb-host\") pod \"crc-debug-mztb4\" (UID: \"7f84610f-c0ad-43af-8838-39aca13d2edb\") " pod="openstack/crc-debug-mztb4" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.749285 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w87j5\" (UniqueName: \"kubernetes.io/projected/7f84610f-c0ad-43af-8838-39aca13d2edb-kube-api-access-w87j5\") pod \"crc-debug-mztb4\" (UID: \"7f84610f-c0ad-43af-8838-39aca13d2edb\") " pod="openstack/crc-debug-mztb4" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.877175 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mztb4" Jun 06 10:37:01 crc kubenswrapper[4911]: I0606 10:37:01.949026 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:37:01 crc kubenswrapper[4911]: E0606 10:37:01.949713 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:37:02 crc kubenswrapper[4911]: I0606 10:37:02.662037 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mztb4" event={"ID":"7f84610f-c0ad-43af-8838-39aca13d2edb","Type":"ContainerStarted","Data":"d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701"} Jun 06 10:37:02 crc kubenswrapper[4911]: I0606 10:37:02.662432 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mztb4" event={"ID":"7f84610f-c0ad-43af-8838-39aca13d2edb","Type":"ContainerStarted","Data":"33f8f76abe23a5bd6ee5efa8ae890d2431dea313317243fc3d7f4d822be70629"} Jun 06 10:37:02 crc kubenswrapper[4911]: I0606 10:37:02.689812 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-mztb4" podStartSLOduration=1.689789396 podStartE2EDuration="1.689789396s" podCreationTimestamp="2025-06-06 10:37:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:37:02.686427029 +0000 UTC m=+5033.961852572" watchObservedRunningTime="2025-06-06 10:37:02.689789396 +0000 UTC m=+5033.965214939" Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.651772 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-mztb4"] Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.652920 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-mztb4" podUID="7f84610f-c0ad-43af-8838-39aca13d2edb" containerName="container-00" containerID="cri-o://d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701" gracePeriod=2 Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.664412 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-mztb4"] Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.746828 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-mztb4" Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.773680 4911 generic.go:334] "Generic (PLEG): container finished" podID="7f84610f-c0ad-43af-8838-39aca13d2edb" containerID="d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701" exitCode=0 Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.773751 4911 scope.go:117] "RemoveContainer" containerID="d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701" Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.773749 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mztb4" Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.810669 4911 scope.go:117] "RemoveContainer" containerID="d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701" Jun 06 10:37:12 crc kubenswrapper[4911]: E0606 10:37:12.811565 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701\": container with ID starting with d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701 not found: ID does not exist" containerID="d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701" Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.811639 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701"} err="failed to get container status \"d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701\": rpc error: code = NotFound desc = could not find container \"d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701\": container with ID starting with d018a04b13f2d5817d37a77e70cca34d19d8238a9d7cb645168e3823974c3701 not found: ID does not exist" Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.871937 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w87j5\" (UniqueName: \"kubernetes.io/projected/7f84610f-c0ad-43af-8838-39aca13d2edb-kube-api-access-w87j5\") pod \"7f84610f-c0ad-43af-8838-39aca13d2edb\" (UID: \"7f84610f-c0ad-43af-8838-39aca13d2edb\") " Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.872462 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7f84610f-c0ad-43af-8838-39aca13d2edb-host\") pod \"7f84610f-c0ad-43af-8838-39aca13d2edb\" (UID: \"7f84610f-c0ad-43af-8838-39aca13d2edb\") " Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.872850 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f84610f-c0ad-43af-8838-39aca13d2edb-host" (OuterVolumeSpecName: "host") pod "7f84610f-c0ad-43af-8838-39aca13d2edb" (UID: "7f84610f-c0ad-43af-8838-39aca13d2edb"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.873548 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7f84610f-c0ad-43af-8838-39aca13d2edb-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.881456 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f84610f-c0ad-43af-8838-39aca13d2edb-kube-api-access-w87j5" (OuterVolumeSpecName: "kube-api-access-w87j5") pod "7f84610f-c0ad-43af-8838-39aca13d2edb" (UID: "7f84610f-c0ad-43af-8838-39aca13d2edb"). InnerVolumeSpecName "kube-api-access-w87j5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:37:12 crc kubenswrapper[4911]: I0606 10:37:12.976559 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w87j5\" (UniqueName: \"kubernetes.io/projected/7f84610f-c0ad-43af-8838-39aca13d2edb-kube-api-access-w87j5\") on node \"crc\" DevicePath \"\"" Jun 06 10:37:13 crc kubenswrapper[4911]: I0606 10:37:13.966019 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f84610f-c0ad-43af-8838-39aca13d2edb" path="/var/lib/kubelet/pods/7f84610f-c0ad-43af-8838-39aca13d2edb/volumes" Jun 06 10:37:15 crc kubenswrapper[4911]: I0606 10:37:15.948721 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:37:15 crc kubenswrapper[4911]: E0606 10:37:15.949737 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:37:28 crc kubenswrapper[4911]: I0606 10:37:28.947798 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:37:28 crc kubenswrapper[4911]: E0606 10:37:28.948936 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:37:39 crc kubenswrapper[4911]: I0606 10:37:39.955483 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:37:39 crc kubenswrapper[4911]: E0606 10:37:39.956243 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:37:51 crc kubenswrapper[4911]: I0606 10:37:51.948608 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:37:51 crc kubenswrapper[4911]: E0606 10:37:51.949568 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.006788 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-qsp7l"] Jun 06 10:38:02 crc kubenswrapper[4911]: E0606 10:38:02.007904 4911 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7f84610f-c0ad-43af-8838-39aca13d2edb" containerName="container-00" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.007919 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f84610f-c0ad-43af-8838-39aca13d2edb" containerName="container-00" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.008359 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f84610f-c0ad-43af-8838-39aca13d2edb" containerName="container-00" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.009185 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-qsp7l" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.067566 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc7mk\" (UniqueName: \"kubernetes.io/projected/92d3378f-4417-4cf9-8227-043923cb83f5-kube-api-access-lc7mk\") pod \"crc-debug-qsp7l\" (UID: \"92d3378f-4417-4cf9-8227-043923cb83f5\") " pod="openstack/crc-debug-qsp7l" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.067919 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/92d3378f-4417-4cf9-8227-043923cb83f5-host\") pod \"crc-debug-qsp7l\" (UID: \"92d3378f-4417-4cf9-8227-043923cb83f5\") " pod="openstack/crc-debug-qsp7l" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.171037 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/92d3378f-4417-4cf9-8227-043923cb83f5-host\") pod \"crc-debug-qsp7l\" (UID: \"92d3378f-4417-4cf9-8227-043923cb83f5\") " pod="openstack/crc-debug-qsp7l" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.171279 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/92d3378f-4417-4cf9-8227-043923cb83f5-host\") pod \"crc-debug-qsp7l\" (UID: \"92d3378f-4417-4cf9-8227-043923cb83f5\") " pod="openstack/crc-debug-qsp7l" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.171322 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc7mk\" (UniqueName: \"kubernetes.io/projected/92d3378f-4417-4cf9-8227-043923cb83f5-kube-api-access-lc7mk\") pod \"crc-debug-qsp7l\" (UID: \"92d3378f-4417-4cf9-8227-043923cb83f5\") " pod="openstack/crc-debug-qsp7l" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.196922 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc7mk\" (UniqueName: \"kubernetes.io/projected/92d3378f-4417-4cf9-8227-043923cb83f5-kube-api-access-lc7mk\") pod \"crc-debug-qsp7l\" (UID: \"92d3378f-4417-4cf9-8227-043923cb83f5\") " pod="openstack/crc-debug-qsp7l" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.347334 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-qsp7l" Jun 06 10:38:02 crc kubenswrapper[4911]: I0606 10:38:02.401654 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-qsp7l" event={"ID":"92d3378f-4417-4cf9-8227-043923cb83f5","Type":"ContainerStarted","Data":"76e51420fbda0eb9ba984a5e32c57e9368262ee58e3b3dd724eeda3f119b6afa"} Jun 06 10:38:03 crc kubenswrapper[4911]: I0606 10:38:03.414899 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-qsp7l" event={"ID":"92d3378f-4417-4cf9-8227-043923cb83f5","Type":"ContainerStarted","Data":"3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a"} Jun 06 10:38:03 crc kubenswrapper[4911]: I0606 10:38:03.432717 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-qsp7l" podStartSLOduration=2.43268767 podStartE2EDuration="2.43268767s" podCreationTimestamp="2025-06-06 10:38:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:38:03.431636973 +0000 UTC m=+5094.707062516" watchObservedRunningTime="2025-06-06 10:38:03.43268767 +0000 UTC m=+5094.708113213" Jun 06 10:38:05 crc kubenswrapper[4911]: I0606 10:38:05.952342 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:38:05 crc kubenswrapper[4911]: E0606 10:38:05.960126 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.406274 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-qsp7l"] Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.408998 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-qsp7l" podUID="92d3378f-4417-4cf9-8227-043923cb83f5" containerName="container-00" containerID="cri-o://3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a" gracePeriod=2 Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.420580 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-qsp7l"] Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.525340 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-qsp7l" Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.527253 4911 generic.go:334] "Generic (PLEG): container finished" podID="92d3378f-4417-4cf9-8227-043923cb83f5" containerID="3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a" exitCode=0 Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.527314 4911 scope.go:117] "RemoveContainer" containerID="3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a" Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.527452 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-qsp7l" Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.554845 4911 scope.go:117] "RemoveContainer" containerID="3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a" Jun 06 10:38:13 crc kubenswrapper[4911]: E0606 10:38:13.555614 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a\": container with ID starting with 3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a not found: ID does not exist" containerID="3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a" Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.555688 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a"} err="failed to get container status \"3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a\": rpc error: code = NotFound desc = could not find container \"3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a\": container with ID starting with 3956a042bb5d0b86078d2f7aa1f8c2b48e2e5097c8914a462767ae80d323a73a not found: ID does not exist" Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.701335 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc7mk\" (UniqueName: \"kubernetes.io/projected/92d3378f-4417-4cf9-8227-043923cb83f5-kube-api-access-lc7mk\") pod \"92d3378f-4417-4cf9-8227-043923cb83f5\" (UID: \"92d3378f-4417-4cf9-8227-043923cb83f5\") " Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.701727 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/92d3378f-4417-4cf9-8227-043923cb83f5-host\") pod \"92d3378f-4417-4cf9-8227-043923cb83f5\" (UID: \"92d3378f-4417-4cf9-8227-043923cb83f5\") " Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.701828 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92d3378f-4417-4cf9-8227-043923cb83f5-host" (OuterVolumeSpecName: "host") pod "92d3378f-4417-4cf9-8227-043923cb83f5" (UID: "92d3378f-4417-4cf9-8227-043923cb83f5"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.702812 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/92d3378f-4417-4cf9-8227-043923cb83f5-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.707523 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92d3378f-4417-4cf9-8227-043923cb83f5-kube-api-access-lc7mk" (OuterVolumeSpecName: "kube-api-access-lc7mk") pod "92d3378f-4417-4cf9-8227-043923cb83f5" (UID: "92d3378f-4417-4cf9-8227-043923cb83f5"). InnerVolumeSpecName "kube-api-access-lc7mk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.806044 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc7mk\" (UniqueName: \"kubernetes.io/projected/92d3378f-4417-4cf9-8227-043923cb83f5-kube-api-access-lc7mk\") on node \"crc\" DevicePath \"\"" Jun 06 10:38:13 crc kubenswrapper[4911]: I0606 10:38:13.962610 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92d3378f-4417-4cf9-8227-043923cb83f5" path="/var/lib/kubelet/pods/92d3378f-4417-4cf9-8227-043923cb83f5/volumes" Jun 06 10:38:19 crc kubenswrapper[4911]: I0606 10:38:19.956818 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:38:19 crc kubenswrapper[4911]: E0606 10:38:19.958665 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:38:32 crc kubenswrapper[4911]: I0606 10:38:32.949411 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:38:32 crc kubenswrapper[4911]: E0606 10:38:32.950530 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:38:46 crc kubenswrapper[4911]: I0606 10:38:46.948836 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:38:46 crc kubenswrapper[4911]: E0606 10:38:46.949644 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:38:59 crc kubenswrapper[4911]: I0606 10:38:59.957778 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:38:59 crc kubenswrapper[4911]: E0606 10:38:59.958784 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:39:01 crc kubenswrapper[4911]: I0606 10:39:01.763708 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-b7882"] Jun 06 10:39:01 crc kubenswrapper[4911]: E0606 10:39:01.765014 4911 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="92d3378f-4417-4cf9-8227-043923cb83f5" containerName="container-00" Jun 06 10:39:01 crc kubenswrapper[4911]: I0606 10:39:01.765036 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="92d3378f-4417-4cf9-8227-043923cb83f5" containerName="container-00" Jun 06 10:39:01 crc kubenswrapper[4911]: I0606 10:39:01.765514 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="92d3378f-4417-4cf9-8227-043923cb83f5" containerName="container-00" Jun 06 10:39:01 crc kubenswrapper[4911]: I0606 10:39:01.766528 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-b7882" Jun 06 10:39:01 crc kubenswrapper[4911]: I0606 10:39:01.919016 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p957l\" (UniqueName: \"kubernetes.io/projected/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-kube-api-access-p957l\") pod \"crc-debug-b7882\" (UID: \"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48\") " pod="openstack/crc-debug-b7882" Jun 06 10:39:01 crc kubenswrapper[4911]: I0606 10:39:01.919222 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-host\") pod \"crc-debug-b7882\" (UID: \"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48\") " pod="openstack/crc-debug-b7882" Jun 06 10:39:02 crc kubenswrapper[4911]: I0606 10:39:02.021872 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p957l\" (UniqueName: \"kubernetes.io/projected/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-kube-api-access-p957l\") pod \"crc-debug-b7882\" (UID: \"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48\") " pod="openstack/crc-debug-b7882" Jun 06 10:39:02 crc kubenswrapper[4911]: I0606 10:39:02.022106 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-host\") pod \"crc-debug-b7882\" (UID: \"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48\") " pod="openstack/crc-debug-b7882" Jun 06 10:39:02 crc kubenswrapper[4911]: I0606 10:39:02.022203 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-host\") pod \"crc-debug-b7882\" (UID: \"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48\") " pod="openstack/crc-debug-b7882" Jun 06 10:39:02 crc kubenswrapper[4911]: I0606 10:39:02.338383 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p957l\" (UniqueName: \"kubernetes.io/projected/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-kube-api-access-p957l\") pod \"crc-debug-b7882\" (UID: \"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48\") " pod="openstack/crc-debug-b7882" Jun 06 10:39:02 crc kubenswrapper[4911]: I0606 10:39:02.398909 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-b7882" Jun 06 10:39:03 crc kubenswrapper[4911]: I0606 10:39:03.045968 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-b7882" event={"ID":"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48","Type":"ContainerStarted","Data":"9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d"} Jun 06 10:39:03 crc kubenswrapper[4911]: I0606 10:39:03.046321 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-b7882" event={"ID":"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48","Type":"ContainerStarted","Data":"5d1395decc9f5c20aa517a7c500ab3e83de10a1351e54d20ad036ae204a8e7a4"} Jun 06 10:39:04 crc kubenswrapper[4911]: I0606 10:39:04.085088 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-b7882" podStartSLOduration=3.085051308 podStartE2EDuration="3.085051308s" podCreationTimestamp="2025-06-06 10:39:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:39:04.071395885 +0000 UTC m=+5155.346821428" watchObservedRunningTime="2025-06-06 10:39:04.085051308 +0000 UTC m=+5155.360476871" Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.033538 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jztkg"] Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.037040 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.042661 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jztkg"] Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.196969 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-catalog-content\") pod \"redhat-operators-jztkg\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.197230 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8xll\" (UniqueName: \"kubernetes.io/projected/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-kube-api-access-j8xll\") pod \"redhat-operators-jztkg\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.197294 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-utilities\") pod \"redhat-operators-jztkg\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.299974 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8xll\" (UniqueName: \"kubernetes.io/projected/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-kube-api-access-j8xll\") pod \"redhat-operators-jztkg\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.300076 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-utilities\") pod \"redhat-operators-jztkg\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.300169 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-catalog-content\") pod \"redhat-operators-jztkg\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.300738 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-utilities\") pod \"redhat-operators-jztkg\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.300763 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-catalog-content\") pod \"redhat-operators-jztkg\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.324933 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8xll\" (UniqueName: \"kubernetes.io/projected/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-kube-api-access-j8xll\") pod \"redhat-operators-jztkg\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:05 crc kubenswrapper[4911]: I0606 10:39:05.363130 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:06 crc kubenswrapper[4911]: I0606 10:39:06.110046 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jztkg"] Jun 06 10:39:07 crc kubenswrapper[4911]: I0606 10:39:07.107447 4911 generic.go:334] "Generic (PLEG): container finished" podID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" containerID="9d97be5c2c9d4f8a40763df820a94943421aec5ab21e1ebbdbee56e6059c0183" exitCode=0 Jun 06 10:39:07 crc kubenswrapper[4911]: I0606 10:39:07.107568 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jztkg" event={"ID":"6e4763f6-2ca1-4af0-a13e-a0bffb22154f","Type":"ContainerDied","Data":"9d97be5c2c9d4f8a40763df820a94943421aec5ab21e1ebbdbee56e6059c0183"} Jun 06 10:39:07 crc kubenswrapper[4911]: I0606 10:39:07.107796 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jztkg" event={"ID":"6e4763f6-2ca1-4af0-a13e-a0bffb22154f","Type":"ContainerStarted","Data":"2d883c9be730bc283ecdf47dcf09efa6dd9130b36f9b8864375312d041baf1e6"} Jun 06 10:39:07 crc kubenswrapper[4911]: I0606 10:39:07.112713 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 10:39:09 crc kubenswrapper[4911]: I0606 10:39:09.131859 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jztkg" event={"ID":"6e4763f6-2ca1-4af0-a13e-a0bffb22154f","Type":"ContainerStarted","Data":"b4669354bd6dca06c98943f7253818a318193239e4cb8d59b082555fa76c6b95"} Jun 06 10:39:10 crc kubenswrapper[4911]: I0606 10:39:10.151918 4911 generic.go:334] "Generic (PLEG): container finished" podID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" containerID="b4669354bd6dca06c98943f7253818a318193239e4cb8d59b082555fa76c6b95" exitCode=0 Jun 06 10:39:10 crc kubenswrapper[4911]: I0606 10:39:10.152040 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jztkg" event={"ID":"6e4763f6-2ca1-4af0-a13e-a0bffb22154f","Type":"ContainerDied","Data":"b4669354bd6dca06c98943f7253818a318193239e4cb8d59b082555fa76c6b95"} Jun 06 10:39:12 crc kubenswrapper[4911]: I0606 10:39:12.948847 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:39:12 crc kubenswrapper[4911]: E0606 10:39:12.949707 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:39:15 crc kubenswrapper[4911]: I0606 10:39:15.511037 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-b7882"] Jun 06 10:39:15 crc kubenswrapper[4911]: I0606 10:39:15.512174 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-b7882" podUID="79d4e68a-32f8-4b1c-bf17-a99d1fff2f48" containerName="container-00" containerID="cri-o://9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d" gracePeriod=2 Jun 06 10:39:15 crc kubenswrapper[4911]: I0606 10:39:15.543237 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-b7882"] Jun 06 10:39:15 crc 
kubenswrapper[4911]: I0606 10:39:15.662352 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-b7882" Jun 06 10:39:15 crc kubenswrapper[4911]: I0606 10:39:15.788850 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p957l\" (UniqueName: \"kubernetes.io/projected/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-kube-api-access-p957l\") pod \"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48\" (UID: \"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48\") " Jun 06 10:39:15 crc kubenswrapper[4911]: I0606 10:39:15.788921 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-host\") pod \"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48\" (UID: \"79d4e68a-32f8-4b1c-bf17-a99d1fff2f48\") " Jun 06 10:39:15 crc kubenswrapper[4911]: I0606 10:39:15.789598 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-host" (OuterVolumeSpecName: "host") pod "79d4e68a-32f8-4b1c-bf17-a99d1fff2f48" (UID: "79d4e68a-32f8-4b1c-bf17-a99d1fff2f48"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:39:15 crc kubenswrapper[4911]: I0606 10:39:15.795995 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-kube-api-access-p957l" (OuterVolumeSpecName: "kube-api-access-p957l") pod "79d4e68a-32f8-4b1c-bf17-a99d1fff2f48" (UID: "79d4e68a-32f8-4b1c-bf17-a99d1fff2f48"). InnerVolumeSpecName "kube-api-access-p957l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:39:15 crc kubenswrapper[4911]: I0606 10:39:15.891450 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p957l\" (UniqueName: \"kubernetes.io/projected/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-kube-api-access-p957l\") on node \"crc\" DevicePath \"\"" Jun 06 10:39:15 crc kubenswrapper[4911]: I0606 10:39:15.891492 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:39:15 crc kubenswrapper[4911]: I0606 10:39:15.966381 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79d4e68a-32f8-4b1c-bf17-a99d1fff2f48" path="/var/lib/kubelet/pods/79d4e68a-32f8-4b1c-bf17-a99d1fff2f48/volumes" Jun 06 10:39:16 crc kubenswrapper[4911]: I0606 10:39:16.218969 4911 generic.go:334] "Generic (PLEG): container finished" podID="79d4e68a-32f8-4b1c-bf17-a99d1fff2f48" containerID="9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d" exitCode=0 Jun 06 10:39:16 crc kubenswrapper[4911]: I0606 10:39:16.219521 4911 scope.go:117] "RemoveContainer" containerID="9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d" Jun 06 10:39:16 crc kubenswrapper[4911]: I0606 10:39:16.219682 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-b7882" Jun 06 10:39:16 crc kubenswrapper[4911]: I0606 10:39:16.257254 4911 scope.go:117] "RemoveContainer" containerID="9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d" Jun 06 10:39:16 crc kubenswrapper[4911]: E0606 10:39:16.258083 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d\": container with ID starting with 9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d not found: ID does not exist" containerID="9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d" Jun 06 10:39:16 crc kubenswrapper[4911]: I0606 10:39:16.258145 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d"} err="failed to get container status \"9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d\": rpc error: code = NotFound desc = could not find container \"9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d\": container with ID starting with 9841af839aabde063589650faf727e2aff17489e39efded8ed1cd98b6ba3ef3d not found: ID does not exist" Jun 06 10:39:17 crc kubenswrapper[4911]: I0606 10:39:17.234774 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jztkg" event={"ID":"6e4763f6-2ca1-4af0-a13e-a0bffb22154f","Type":"ContainerStarted","Data":"7e0f067cf978fd56498f13e97a89d210f27f22dafee73a508928716583c132cf"} Jun 06 10:39:17 crc kubenswrapper[4911]: I0606 10:39:17.253510 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jztkg" podStartSLOduration=4.332379861 podStartE2EDuration="13.253486563s" podCreationTimestamp="2025-06-06 10:39:04 +0000 UTC" firstStartedPulling="2025-06-06 10:39:07.112363413 +0000 UTC m=+5158.387788956" lastFinishedPulling="2025-06-06 10:39:16.033470115 +0000 UTC m=+5167.308895658" observedRunningTime="2025-06-06 10:39:17.252987781 +0000 UTC m=+5168.528413324" watchObservedRunningTime="2025-06-06 10:39:17.253486563 +0000 UTC m=+5168.528912106" Jun 06 10:39:23 crc kubenswrapper[4911]: I0606 10:39:23.949185 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:39:23 crc kubenswrapper[4911]: E0606 10:39:23.950312 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:39:25 crc kubenswrapper[4911]: I0606 10:39:25.364079 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:25 crc kubenswrapper[4911]: I0606 10:39:25.364692 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:25 crc kubenswrapper[4911]: I0606 10:39:25.423630 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:26 crc kubenswrapper[4911]: I0606 10:39:26.389881 4911 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:26 crc kubenswrapper[4911]: I0606 10:39:26.445818 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jztkg"] Jun 06 10:39:28 crc kubenswrapper[4911]: I0606 10:39:28.353517 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jztkg" podUID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" containerName="registry-server" containerID="cri-o://7e0f067cf978fd56498f13e97a89d210f27f22dafee73a508928716583c132cf" gracePeriod=2 Jun 06 10:39:29 crc kubenswrapper[4911]: I0606 10:39:29.372669 4911 generic.go:334] "Generic (PLEG): container finished" podID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" containerID="7e0f067cf978fd56498f13e97a89d210f27f22dafee73a508928716583c132cf" exitCode=0 Jun 06 10:39:29 crc kubenswrapper[4911]: I0606 10:39:29.372760 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jztkg" event={"ID":"6e4763f6-2ca1-4af0-a13e-a0bffb22154f","Type":"ContainerDied","Data":"7e0f067cf978fd56498f13e97a89d210f27f22dafee73a508928716583c132cf"} Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.388491 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jztkg" event={"ID":"6e4763f6-2ca1-4af0-a13e-a0bffb22154f","Type":"ContainerDied","Data":"2d883c9be730bc283ecdf47dcf09efa6dd9130b36f9b8864375312d041baf1e6"} Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.389021 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d883c9be730bc283ecdf47dcf09efa6dd9130b36f9b8864375312d041baf1e6" Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.456372 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.659258 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-utilities\") pod \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.659327 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-catalog-content\") pod \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.659678 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8xll\" (UniqueName: \"kubernetes.io/projected/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-kube-api-access-j8xll\") pod \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\" (UID: \"6e4763f6-2ca1-4af0-a13e-a0bffb22154f\") " Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.661536 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-utilities" (OuterVolumeSpecName: "utilities") pod "6e4763f6-2ca1-4af0-a13e-a0bffb22154f" (UID: "6e4763f6-2ca1-4af0-a13e-a0bffb22154f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.668800 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-kube-api-access-j8xll" (OuterVolumeSpecName: "kube-api-access-j8xll") pod "6e4763f6-2ca1-4af0-a13e-a0bffb22154f" (UID: "6e4763f6-2ca1-4af0-a13e-a0bffb22154f"). InnerVolumeSpecName "kube-api-access-j8xll". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.726869 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6e4763f6-2ca1-4af0-a13e-a0bffb22154f" (UID: "6e4763f6-2ca1-4af0-a13e-a0bffb22154f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.762895 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8xll\" (UniqueName: \"kubernetes.io/projected/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-kube-api-access-j8xll\") on node \"crc\" DevicePath \"\"" Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.762943 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:39:30 crc kubenswrapper[4911]: I0606 10:39:30.762958 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e4763f6-2ca1-4af0-a13e-a0bffb22154f-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:39:31 crc kubenswrapper[4911]: I0606 10:39:31.398298 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jztkg" Jun 06 10:39:31 crc kubenswrapper[4911]: I0606 10:39:31.434601 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jztkg"] Jun 06 10:39:31 crc kubenswrapper[4911]: I0606 10:39:31.448321 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jztkg"] Jun 06 10:39:31 crc kubenswrapper[4911]: I0606 10:39:31.967955 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" path="/var/lib/kubelet/pods/6e4763f6-2ca1-4af0-a13e-a0bffb22154f/volumes" Jun 06 10:39:38 crc kubenswrapper[4911]: I0606 10:39:38.948451 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:39:38 crc kubenswrapper[4911]: E0606 10:39:38.949603 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:39:52 crc kubenswrapper[4911]: I0606 10:39:52.948324 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:39:52 crc kubenswrapper[4911]: E0606 10:39:52.949429 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:40:01 crc kubenswrapper[4911]: I0606 10:40:01.989487 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-lz9j6"] Jun 06 10:40:01 crc kubenswrapper[4911]: E0606 10:40:01.990748 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" containerName="extract-utilities" Jun 06 10:40:01 crc kubenswrapper[4911]: I0606 10:40:01.990768 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" containerName="extract-utilities" Jun 06 10:40:01 crc kubenswrapper[4911]: E0606 10:40:01.990789 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79d4e68a-32f8-4b1c-bf17-a99d1fff2f48" containerName="container-00" Jun 06 10:40:01 crc kubenswrapper[4911]: I0606 10:40:01.990796 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="79d4e68a-32f8-4b1c-bf17-a99d1fff2f48" containerName="container-00" Jun 06 10:40:01 crc kubenswrapper[4911]: E0606 10:40:01.990831 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" containerName="registry-server" Jun 06 10:40:01 crc kubenswrapper[4911]: I0606 10:40:01.990838 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" containerName="registry-server" Jun 06 10:40:01 crc kubenswrapper[4911]: E0606 10:40:01.990852 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" containerName="extract-content" 
Jun 06 10:40:01 crc kubenswrapper[4911]: I0606 10:40:01.990857 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" containerName="extract-content" Jun 06 10:40:01 crc kubenswrapper[4911]: I0606 10:40:01.991028 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e4763f6-2ca1-4af0-a13e-a0bffb22154f" containerName="registry-server" Jun 06 10:40:01 crc kubenswrapper[4911]: I0606 10:40:01.991053 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="79d4e68a-32f8-4b1c-bf17-a99d1fff2f48" containerName="container-00" Jun 06 10:40:01 crc kubenswrapper[4911]: I0606 10:40:01.991773 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-lz9j6" Jun 06 10:40:02 crc kubenswrapper[4911]: I0606 10:40:02.108703 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/11cba59e-ef68-4d82-84de-7016be1508e3-host\") pod \"crc-debug-lz9j6\" (UID: \"11cba59e-ef68-4d82-84de-7016be1508e3\") " pod="openstack/crc-debug-lz9j6" Jun 06 10:40:02 crc kubenswrapper[4911]: I0606 10:40:02.108878 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xsw9\" (UniqueName: \"kubernetes.io/projected/11cba59e-ef68-4d82-84de-7016be1508e3-kube-api-access-4xsw9\") pod \"crc-debug-lz9j6\" (UID: \"11cba59e-ef68-4d82-84de-7016be1508e3\") " pod="openstack/crc-debug-lz9j6" Jun 06 10:40:02 crc kubenswrapper[4911]: I0606 10:40:02.211227 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xsw9\" (UniqueName: \"kubernetes.io/projected/11cba59e-ef68-4d82-84de-7016be1508e3-kube-api-access-4xsw9\") pod \"crc-debug-lz9j6\" (UID: \"11cba59e-ef68-4d82-84de-7016be1508e3\") " pod="openstack/crc-debug-lz9j6" Jun 06 10:40:02 crc kubenswrapper[4911]: I0606 10:40:02.211735 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/11cba59e-ef68-4d82-84de-7016be1508e3-host\") pod \"crc-debug-lz9j6\" (UID: \"11cba59e-ef68-4d82-84de-7016be1508e3\") " pod="openstack/crc-debug-lz9j6" Jun 06 10:40:02 crc kubenswrapper[4911]: I0606 10:40:02.211863 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/11cba59e-ef68-4d82-84de-7016be1508e3-host\") pod \"crc-debug-lz9j6\" (UID: \"11cba59e-ef68-4d82-84de-7016be1508e3\") " pod="openstack/crc-debug-lz9j6" Jun 06 10:40:02 crc kubenswrapper[4911]: I0606 10:40:02.239862 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xsw9\" (UniqueName: \"kubernetes.io/projected/11cba59e-ef68-4d82-84de-7016be1508e3-kube-api-access-4xsw9\") pod \"crc-debug-lz9j6\" (UID: \"11cba59e-ef68-4d82-84de-7016be1508e3\") " pod="openstack/crc-debug-lz9j6" Jun 06 10:40:02 crc kubenswrapper[4911]: I0606 10:40:02.310766 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-lz9j6" Jun 06 10:40:02 crc kubenswrapper[4911]: I0606 10:40:02.741916 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-lz9j6" event={"ID":"11cba59e-ef68-4d82-84de-7016be1508e3","Type":"ContainerStarted","Data":"5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf"} Jun 06 10:40:02 crc kubenswrapper[4911]: I0606 10:40:02.742190 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-lz9j6" event={"ID":"11cba59e-ef68-4d82-84de-7016be1508e3","Type":"ContainerStarted","Data":"048c0af5de311059636a1d66e9e179ab8d03fe690cd51648ab45982fa9faeaec"} Jun 06 10:40:03 crc kubenswrapper[4911]: I0606 10:40:03.785323 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-lz9j6" podStartSLOduration=2.785288004 podStartE2EDuration="2.785288004s" podCreationTimestamp="2025-06-06 10:40:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:40:03.778625512 +0000 UTC m=+5215.054051065" watchObservedRunningTime="2025-06-06 10:40:03.785288004 +0000 UTC m=+5215.060713557" Jun 06 10:40:07 crc kubenswrapper[4911]: I0606 10:40:07.949667 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:40:07 crc kubenswrapper[4911]: E0606 10:40:07.952294 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.299642 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-lz9j6"] Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.300947 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-lz9j6" podUID="11cba59e-ef68-4d82-84de-7016be1508e3" containerName="container-00" containerID="cri-o://5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf" gracePeriod=2 Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.315288 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-lz9j6"] Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.573996 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-lz9j6" Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.722203 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xsw9\" (UniqueName: \"kubernetes.io/projected/11cba59e-ef68-4d82-84de-7016be1508e3-kube-api-access-4xsw9\") pod \"11cba59e-ef68-4d82-84de-7016be1508e3\" (UID: \"11cba59e-ef68-4d82-84de-7016be1508e3\") " Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.722274 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/11cba59e-ef68-4d82-84de-7016be1508e3-host\") pod \"11cba59e-ef68-4d82-84de-7016be1508e3\" (UID: \"11cba59e-ef68-4d82-84de-7016be1508e3\") " Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.722454 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/11cba59e-ef68-4d82-84de-7016be1508e3-host" (OuterVolumeSpecName: "host") pod "11cba59e-ef68-4d82-84de-7016be1508e3" (UID: "11cba59e-ef68-4d82-84de-7016be1508e3"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.722769 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/11cba59e-ef68-4d82-84de-7016be1508e3-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.733453 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11cba59e-ef68-4d82-84de-7016be1508e3-kube-api-access-4xsw9" (OuterVolumeSpecName: "kube-api-access-4xsw9") pod "11cba59e-ef68-4d82-84de-7016be1508e3" (UID: "11cba59e-ef68-4d82-84de-7016be1508e3"). InnerVolumeSpecName "kube-api-access-4xsw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.936442 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xsw9\" (UniqueName: \"kubernetes.io/projected/11cba59e-ef68-4d82-84de-7016be1508e3-kube-api-access-4xsw9\") on node \"crc\" DevicePath \"\"" Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.968596 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11cba59e-ef68-4d82-84de-7016be1508e3" path="/var/lib/kubelet/pods/11cba59e-ef68-4d82-84de-7016be1508e3/volumes" Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.968676 4911 generic.go:334] "Generic (PLEG): container finished" podID="11cba59e-ef68-4d82-84de-7016be1508e3" containerID="5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf" exitCode=0 Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.968782 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-lz9j6" Jun 06 10:40:13 crc kubenswrapper[4911]: I0606 10:40:13.969994 4911 scope.go:117] "RemoveContainer" containerID="5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf" Jun 06 10:40:14 crc kubenswrapper[4911]: I0606 10:40:14.002054 4911 scope.go:117] "RemoveContainer" containerID="5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf" Jun 06 10:40:14 crc kubenswrapper[4911]: E0606 10:40:14.005438 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf\": container with ID starting with 5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf not found: ID does not exist" containerID="5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf" Jun 06 10:40:14 crc kubenswrapper[4911]: I0606 10:40:14.005500 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf"} err="failed to get container status \"5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf\": rpc error: code = NotFound desc = could not find container \"5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf\": container with ID starting with 5e933848bce7973d672504eb5f1f807addb6f172b856a6d9434f52b0c12a78bf not found: ID does not exist" Jun 06 10:40:18 crc kubenswrapper[4911]: I0606 10:40:18.948313 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:40:18 crc kubenswrapper[4911]: E0606 10:40:18.949328 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:40:23 crc kubenswrapper[4911]: I0606 10:40:23.432224 4911 scope.go:117] "RemoveContainer" containerID="f5665667c83f53df7915a2c224d6aa9fbc5415b2548080f42a8c84d094710761" Jun 06 10:40:31 crc kubenswrapper[4911]: I0606 10:40:31.949207 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:40:31 crc kubenswrapper[4911]: E0606 10:40:31.950722 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:40:44 crc kubenswrapper[4911]: I0606 10:40:44.949414 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:40:44 crc kubenswrapper[4911]: E0606 10:40:44.950327 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.555354 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z8wbr"] Jun 06 10:40:57 crc kubenswrapper[4911]: E0606 10:40:57.556809 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11cba59e-ef68-4d82-84de-7016be1508e3" containerName="container-00" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.556840 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="11cba59e-ef68-4d82-84de-7016be1508e3" containerName="container-00" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.557140 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="11cba59e-ef68-4d82-84de-7016be1508e3" containerName="container-00" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.559030 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.572235 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z8wbr"] Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.738431 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-utilities\") pod \"certified-operators-z8wbr\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.738698 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p9bc\" (UniqueName: \"kubernetes.io/projected/1c8acd65-57df-45d6-939d-d82b321b60df-kube-api-access-7p9bc\") pod \"certified-operators-z8wbr\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.738798 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-catalog-content\") pod \"certified-operators-z8wbr\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.841774 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-utilities\") pod \"certified-operators-z8wbr\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.841940 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p9bc\" (UniqueName: \"kubernetes.io/projected/1c8acd65-57df-45d6-939d-d82b321b60df-kube-api-access-7p9bc\") pod \"certified-operators-z8wbr\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.842001 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-catalog-content\") pod 
\"certified-operators-z8wbr\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.843809 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-utilities\") pod \"certified-operators-z8wbr\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.844629 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-catalog-content\") pod \"certified-operators-z8wbr\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.876255 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p9bc\" (UniqueName: \"kubernetes.io/projected/1c8acd65-57df-45d6-939d-d82b321b60df-kube-api-access-7p9bc\") pod \"certified-operators-z8wbr\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:40:57 crc kubenswrapper[4911]: I0606 10:40:57.880794 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:40:58 crc kubenswrapper[4911]: I0606 10:40:58.585264 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z8wbr"] Jun 06 10:40:58 crc kubenswrapper[4911]: I0606 10:40:58.949479 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:40:58 crc kubenswrapper[4911]: E0606 10:40:58.949905 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:40:59 crc kubenswrapper[4911]: I0606 10:40:59.453408 4911 generic.go:334] "Generic (PLEG): container finished" podID="1c8acd65-57df-45d6-939d-d82b321b60df" containerID="09143f27015247b1dbb0ff6a5e4d201c651d80e1b6d5c79b8306623c4762bfd8" exitCode=0 Jun 06 10:40:59 crc kubenswrapper[4911]: I0606 10:40:59.453495 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8wbr" event={"ID":"1c8acd65-57df-45d6-939d-d82b321b60df","Type":"ContainerDied","Data":"09143f27015247b1dbb0ff6a5e4d201c651d80e1b6d5c79b8306623c4762bfd8"} Jun 06 10:40:59 crc kubenswrapper[4911]: I0606 10:40:59.453788 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8wbr" event={"ID":"1c8acd65-57df-45d6-939d-d82b321b60df","Type":"ContainerStarted","Data":"277b1cf784bb056e55bcb920e2f71f9d9bfe9758e81c0c07e230d6e4a722c5c7"} Jun 06 10:41:01 crc kubenswrapper[4911]: I0606 10:41:01.738347 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-vtmpg"] Jun 06 10:41:01 crc kubenswrapper[4911]: I0606 10:41:01.740675 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-vtmpg" Jun 06 10:41:01 crc kubenswrapper[4911]: I0606 10:41:01.754169 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrzjr\" (UniqueName: \"kubernetes.io/projected/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-kube-api-access-nrzjr\") pod \"crc-debug-vtmpg\" (UID: \"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8\") " pod="openstack/crc-debug-vtmpg" Jun 06 10:41:01 crc kubenswrapper[4911]: I0606 10:41:01.754326 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-host\") pod \"crc-debug-vtmpg\" (UID: \"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8\") " pod="openstack/crc-debug-vtmpg" Jun 06 10:41:01 crc kubenswrapper[4911]: I0606 10:41:01.857132 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-host\") pod \"crc-debug-vtmpg\" (UID: \"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8\") " pod="openstack/crc-debug-vtmpg" Jun 06 10:41:01 crc kubenswrapper[4911]: I0606 10:41:01.857290 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-host\") pod \"crc-debug-vtmpg\" (UID: \"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8\") " pod="openstack/crc-debug-vtmpg" Jun 06 10:41:01 crc kubenswrapper[4911]: I0606 10:41:01.857369 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrzjr\" (UniqueName: \"kubernetes.io/projected/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-kube-api-access-nrzjr\") pod \"crc-debug-vtmpg\" (UID: \"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8\") " pod="openstack/crc-debug-vtmpg" Jun 06 10:41:01 crc kubenswrapper[4911]: I0606 10:41:01.918636 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrzjr\" (UniqueName: \"kubernetes.io/projected/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-kube-api-access-nrzjr\") pod \"crc-debug-vtmpg\" (UID: \"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8\") " pod="openstack/crc-debug-vtmpg" Jun 06 10:41:02 crc kubenswrapper[4911]: I0606 10:41:02.105587 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-vtmpg" Jun 06 10:41:02 crc kubenswrapper[4911]: I0606 10:41:02.486821 4911 generic.go:334] "Generic (PLEG): container finished" podID="1c8acd65-57df-45d6-939d-d82b321b60df" containerID="6d77e2c7541a612312781b21eb26ab16cb0de3a907510853e15f243d6e8203bc" exitCode=0 Jun 06 10:41:02 crc kubenswrapper[4911]: I0606 10:41:02.486900 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8wbr" event={"ID":"1c8acd65-57df-45d6-939d-d82b321b60df","Type":"ContainerDied","Data":"6d77e2c7541a612312781b21eb26ab16cb0de3a907510853e15f243d6e8203bc"} Jun 06 10:41:02 crc kubenswrapper[4911]: I0606 10:41:02.488945 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-vtmpg" event={"ID":"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8","Type":"ContainerStarted","Data":"4085dcb1cb6d7343bc3dd8724a3cc5a5902d2db61a69c366f02411ad0f8add02"} Jun 06 10:41:03 crc kubenswrapper[4911]: I0606 10:41:03.500772 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-vtmpg" event={"ID":"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8","Type":"ContainerStarted","Data":"ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999"} Jun 06 10:41:03 crc kubenswrapper[4911]: I0606 10:41:03.530575 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-vtmpg" podStartSLOduration=2.53055379 podStartE2EDuration="2.53055379s" podCreationTimestamp="2025-06-06 10:41:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:41:03.522684886 +0000 UTC m=+5274.798110429" watchObservedRunningTime="2025-06-06 10:41:03.53055379 +0000 UTC m=+5274.805979333" Jun 06 10:41:04 crc kubenswrapper[4911]: I0606 10:41:04.516154 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8wbr" event={"ID":"1c8acd65-57df-45d6-939d-d82b321b60df","Type":"ContainerStarted","Data":"ec3e2bdc2b496dbc416c81b4482be4ef930b40939879c92b38c8bf1c6c3f350a"} Jun 06 10:41:04 crc kubenswrapper[4911]: I0606 10:41:04.543544 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z8wbr" podStartSLOduration=3.6412773 podStartE2EDuration="7.543511441s" podCreationTimestamp="2025-06-06 10:40:57 +0000 UTC" firstStartedPulling="2025-06-06 10:40:59.456780048 +0000 UTC m=+5270.732205611" lastFinishedPulling="2025-06-06 10:41:03.359014209 +0000 UTC m=+5274.634439752" observedRunningTime="2025-06-06 10:41:04.53495076 +0000 UTC m=+5275.810376313" watchObservedRunningTime="2025-06-06 10:41:04.543511441 +0000 UTC m=+5275.818936994" Jun 06 10:41:07 crc kubenswrapper[4911]: I0606 10:41:07.881236 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:41:07 crc kubenswrapper[4911]: I0606 10:41:07.882134 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:41:07 crc kubenswrapper[4911]: I0606 10:41:07.966575 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:41:08 crc kubenswrapper[4911]: I0606 10:41:08.614887 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:41:08 crc kubenswrapper[4911]: 
I0606 10:41:08.682170 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z8wbr"] Jun 06 10:41:10 crc kubenswrapper[4911]: I0606 10:41:10.575505 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z8wbr" podUID="1c8acd65-57df-45d6-939d-d82b321b60df" containerName="registry-server" containerID="cri-o://ec3e2bdc2b496dbc416c81b4482be4ef930b40939879c92b38c8bf1c6c3f350a" gracePeriod=2 Jun 06 10:41:11 crc kubenswrapper[4911]: I0606 10:41:11.587894 4911 generic.go:334] "Generic (PLEG): container finished" podID="1c8acd65-57df-45d6-939d-d82b321b60df" containerID="ec3e2bdc2b496dbc416c81b4482be4ef930b40939879c92b38c8bf1c6c3f350a" exitCode=0 Jun 06 10:41:11 crc kubenswrapper[4911]: I0606 10:41:11.587986 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8wbr" event={"ID":"1c8acd65-57df-45d6-939d-d82b321b60df","Type":"ContainerDied","Data":"ec3e2bdc2b496dbc416c81b4482be4ef930b40939879c92b38c8bf1c6c3f350a"} Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.053523 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.235483 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-utilities\") pod \"1c8acd65-57df-45d6-939d-d82b321b60df\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.235572 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p9bc\" (UniqueName: \"kubernetes.io/projected/1c8acd65-57df-45d6-939d-d82b321b60df-kube-api-access-7p9bc\") pod \"1c8acd65-57df-45d6-939d-d82b321b60df\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.235773 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-catalog-content\") pod \"1c8acd65-57df-45d6-939d-d82b321b60df\" (UID: \"1c8acd65-57df-45d6-939d-d82b321b60df\") " Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.236673 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-utilities" (OuterVolumeSpecName: "utilities") pod "1c8acd65-57df-45d6-939d-d82b321b60df" (UID: "1c8acd65-57df-45d6-939d-d82b321b60df"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.236865 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.242994 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c8acd65-57df-45d6-939d-d82b321b60df-kube-api-access-7p9bc" (OuterVolumeSpecName: "kube-api-access-7p9bc") pod "1c8acd65-57df-45d6-939d-d82b321b60df" (UID: "1c8acd65-57df-45d6-939d-d82b321b60df"). InnerVolumeSpecName "kube-api-access-7p9bc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.280065 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c8acd65-57df-45d6-939d-d82b321b60df" (UID: "1c8acd65-57df-45d6-939d-d82b321b60df"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.339582 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c8acd65-57df-45d6-939d-d82b321b60df-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.339630 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p9bc\" (UniqueName: \"kubernetes.io/projected/1c8acd65-57df-45d6-939d-d82b321b60df-kube-api-access-7p9bc\") on node \"crc\" DevicePath \"\"" Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.603437 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8wbr" event={"ID":"1c8acd65-57df-45d6-939d-d82b321b60df","Type":"ContainerDied","Data":"277b1cf784bb056e55bcb920e2f71f9d9bfe9758e81c0c07e230d6e4a722c5c7"} Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.603528 4911 scope.go:117] "RemoveContainer" containerID="ec3e2bdc2b496dbc416c81b4482be4ef930b40939879c92b38c8bf1c6c3f350a" Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.603549 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z8wbr" Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.637599 4911 scope.go:117] "RemoveContainer" containerID="6d77e2c7541a612312781b21eb26ab16cb0de3a907510853e15f243d6e8203bc" Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.659159 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z8wbr"] Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.673291 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z8wbr"] Jun 06 10:41:12 crc kubenswrapper[4911]: I0606 10:41:12.674608 4911 scope.go:117] "RemoveContainer" containerID="09143f27015247b1dbb0ff6a5e4d201c651d80e1b6d5c79b8306623c4762bfd8" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.167195 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-vtmpg"] Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.167987 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-vtmpg" podUID="7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8" containerName="container-00" containerID="cri-o://ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999" gracePeriod=2 Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.186628 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-vtmpg"] Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.298388 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-vtmpg" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.367417 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrzjr\" (UniqueName: \"kubernetes.io/projected/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-kube-api-access-nrzjr\") pod \"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8\" (UID: \"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8\") " Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.367826 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-host\") pod \"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8\" (UID: \"7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8\") " Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.367979 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-host" (OuterVolumeSpecName: "host") pod "7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8" (UID: "7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.369074 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.372518 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-kube-api-access-nrzjr" (OuterVolumeSpecName: "kube-api-access-nrzjr") pod "7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8" (UID: "7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8"). InnerVolumeSpecName "kube-api-access-nrzjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.471199 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrzjr\" (UniqueName: \"kubernetes.io/projected/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8-kube-api-access-nrzjr\") on node \"crc\" DevicePath \"\"" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.619934 4911 generic.go:334] "Generic (PLEG): container finished" podID="7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8" containerID="ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999" exitCode=0 Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.620046 4911 scope.go:117] "RemoveContainer" containerID="ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.620041 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-vtmpg" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.664658 4911 scope.go:117] "RemoveContainer" containerID="ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999" Jun 06 10:41:13 crc kubenswrapper[4911]: E0606 10:41:13.665335 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999\": container with ID starting with ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999 not found: ID does not exist" containerID="ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.665381 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999"} err="failed to get container status \"ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999\": rpc error: code = NotFound desc = could not find container \"ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999\": container with ID starting with ab38e9290f4312141a3e7a2d8e2cdef292214afa748a4879129156a4d2735999 not found: ID does not exist" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.948079 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:41:13 crc kubenswrapper[4911]: E0606 10:41:13.948713 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.959841 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c8acd65-57df-45d6-939d-d82b321b60df" path="/var/lib/kubelet/pods/1c8acd65-57df-45d6-939d-d82b321b60df/volumes" Jun 06 10:41:13 crc kubenswrapper[4911]: I0606 10:41:13.961163 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8" path="/var/lib/kubelet/pods/7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8/volumes" Jun 06 10:41:25 crc kubenswrapper[4911]: I0606 10:41:25.948753 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:41:26 crc kubenswrapper[4911]: I0606 10:41:26.764769 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"ad549db7e80199891d6397a0a77c646ef8025ceab1a43370ad30a9aaeb95578d"} Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.589922 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-cv4lv"] Jun 06 10:42:01 crc kubenswrapper[4911]: E0606 10:42:01.590907 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c8acd65-57df-45d6-939d-d82b321b60df" containerName="extract-content" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.590923 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c8acd65-57df-45d6-939d-d82b321b60df" containerName="extract-content" Jun 06 10:42:01 crc 
kubenswrapper[4911]: E0606 10:42:01.590957 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c8acd65-57df-45d6-939d-d82b321b60df" containerName="registry-server" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.590964 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c8acd65-57df-45d6-939d-d82b321b60df" containerName="registry-server" Jun 06 10:42:01 crc kubenswrapper[4911]: E0606 10:42:01.590988 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8" containerName="container-00" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.590996 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8" containerName="container-00" Jun 06 10:42:01 crc kubenswrapper[4911]: E0606 10:42:01.591018 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c8acd65-57df-45d6-939d-d82b321b60df" containerName="extract-utilities" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.591025 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c8acd65-57df-45d6-939d-d82b321b60df" containerName="extract-utilities" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.591241 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e1c0c1d-5ca4-4fef-96f9-7f151ef431c8" containerName="container-00" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.591269 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c8acd65-57df-45d6-939d-d82b321b60df" containerName="registry-server" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.592240 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-cv4lv" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.765337 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-host\") pod \"crc-debug-cv4lv\" (UID: \"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f\") " pod="openstack/crc-debug-cv4lv" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.765428 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58jqg\" (UniqueName: \"kubernetes.io/projected/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-kube-api-access-58jqg\") pod \"crc-debug-cv4lv\" (UID: \"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f\") " pod="openstack/crc-debug-cv4lv" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.867712 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-host\") pod \"crc-debug-cv4lv\" (UID: \"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f\") " pod="openstack/crc-debug-cv4lv" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.867808 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58jqg\" (UniqueName: \"kubernetes.io/projected/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-kube-api-access-58jqg\") pod \"crc-debug-cv4lv\" (UID: \"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f\") " pod="openstack/crc-debug-cv4lv" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.867955 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-host\") pod \"crc-debug-cv4lv\" (UID: \"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f\") " 
pod="openstack/crc-debug-cv4lv" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.894634 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58jqg\" (UniqueName: \"kubernetes.io/projected/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-kube-api-access-58jqg\") pod \"crc-debug-cv4lv\" (UID: \"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f\") " pod="openstack/crc-debug-cv4lv" Jun 06 10:42:01 crc kubenswrapper[4911]: I0606 10:42:01.915278 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-cv4lv" Jun 06 10:42:02 crc kubenswrapper[4911]: I0606 10:42:02.185358 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-cv4lv" event={"ID":"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f","Type":"ContainerStarted","Data":"36244ebda9450ca4c05eeb378f00a6b651f76de8dec7af323e397b3ede7a9aea"} Jun 06 10:42:03 crc kubenswrapper[4911]: I0606 10:42:03.196975 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-cv4lv" event={"ID":"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f","Type":"ContainerStarted","Data":"800b2658f849728fcfedab390dab36937d461a83de85c60d810f6699092820af"} Jun 06 10:42:03 crc kubenswrapper[4911]: I0606 10:42:03.215165 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-cv4lv" podStartSLOduration=2.215136114 podStartE2EDuration="2.215136114s" podCreationTimestamp="2025-06-06 10:42:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:42:03.210298779 +0000 UTC m=+5334.485724332" watchObservedRunningTime="2025-06-06 10:42:03.215136114 +0000 UTC m=+5334.490561657" Jun 06 10:42:12 crc kubenswrapper[4911]: E0606 10:42:12.706252 4911 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94eb7fcf_6c0d_4c82_8244_6fbf90f40e9f.slice/crio-800b2658f849728fcfedab390dab36937d461a83de85c60d810f6699092820af.scope\": RecentStats: unable to find data in memory cache]" Jun 06 10:42:13 crc kubenswrapper[4911]: I0606 10:42:13.315035 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-cv4lv"] Jun 06 10:42:13 crc kubenswrapper[4911]: I0606 10:42:13.315861 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-cv4lv" podUID="94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f" containerName="container-00" containerID="cri-o://800b2658f849728fcfedab390dab36937d461a83de85c60d810f6699092820af" gracePeriod=2 Jun 06 10:42:13 crc kubenswrapper[4911]: I0606 10:42:13.342057 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-cv4lv"] Jun 06 10:42:13 crc kubenswrapper[4911]: I0606 10:42:13.374631 4911 generic.go:334] "Generic (PLEG): container finished" podID="94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f" containerID="800b2658f849728fcfedab390dab36937d461a83de85c60d810f6699092820af" exitCode=0 Jun 06 10:42:13 crc kubenswrapper[4911]: I0606 10:42:13.874770 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-cv4lv" Jun 06 10:42:13 crc kubenswrapper[4911]: I0606 10:42:13.982657 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58jqg\" (UniqueName: \"kubernetes.io/projected/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-kube-api-access-58jqg\") pod \"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f\" (UID: \"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f\") " Jun 06 10:42:13 crc kubenswrapper[4911]: I0606 10:42:13.982811 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-host\") pod \"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f\" (UID: \"94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f\") " Jun 06 10:42:13 crc kubenswrapper[4911]: I0606 10:42:13.982970 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-host" (OuterVolumeSpecName: "host") pod "94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f" (UID: "94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:42:13 crc kubenswrapper[4911]: I0606 10:42:13.983330 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:42:13 crc kubenswrapper[4911]: I0606 10:42:13.991870 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-kube-api-access-58jqg" (OuterVolumeSpecName: "kube-api-access-58jqg") pod "94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f" (UID: "94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f"). InnerVolumeSpecName "kube-api-access-58jqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:42:14 crc kubenswrapper[4911]: I0606 10:42:14.086411 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58jqg\" (UniqueName: \"kubernetes.io/projected/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f-kube-api-access-58jqg\") on node \"crc\" DevicePath \"\"" Jun 06 10:42:14 crc kubenswrapper[4911]: I0606 10:42:14.387219 4911 scope.go:117] "RemoveContainer" containerID="800b2658f849728fcfedab390dab36937d461a83de85c60d810f6699092820af" Jun 06 10:42:14 crc kubenswrapper[4911]: I0606 10:42:14.387285 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-cv4lv" Jun 06 10:42:15 crc kubenswrapper[4911]: I0606 10:42:15.960497 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f" path="/var/lib/kubelet/pods/94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f/volumes" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.711785 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nqmb9"] Jun 06 10:42:59 crc kubenswrapper[4911]: E0606 10:42:59.713519 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f" containerName="container-00" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.713540 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f" containerName="container-00" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.713800 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="94eb7fcf-6c0d-4c82-8244-6fbf90f40e9f" containerName="container-00" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.716619 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.744619 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqmb9"] Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.854318 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-catalog-content\") pod \"redhat-marketplace-nqmb9\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.854454 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp5dq\" (UniqueName: \"kubernetes.io/projected/55df3ca8-a98f-4174-a001-d5e7496b93f6-kube-api-access-gp5dq\") pod \"redhat-marketplace-nqmb9\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.854511 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-utilities\") pod \"redhat-marketplace-nqmb9\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.956667 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp5dq\" (UniqueName: \"kubernetes.io/projected/55df3ca8-a98f-4174-a001-d5e7496b93f6-kube-api-access-gp5dq\") pod \"redhat-marketplace-nqmb9\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.956743 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-utilities\") pod \"redhat-marketplace-nqmb9\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.957055 4911 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-catalog-content\") pod \"redhat-marketplace-nqmb9\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.957456 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-utilities\") pod \"redhat-marketplace-nqmb9\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.957651 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-catalog-content\") pod \"redhat-marketplace-nqmb9\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:42:59 crc kubenswrapper[4911]: I0606 10:42:59.986561 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp5dq\" (UniqueName: \"kubernetes.io/projected/55df3ca8-a98f-4174-a001-d5e7496b93f6-kube-api-access-gp5dq\") pod \"redhat-marketplace-nqmb9\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:43:00 crc kubenswrapper[4911]: I0606 10:43:00.050851 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:43:00 crc kubenswrapper[4911]: I0606 10:43:00.755614 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqmb9"] Jun 06 10:43:00 crc kubenswrapper[4911]: I0606 10:43:00.927431 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqmb9" event={"ID":"55df3ca8-a98f-4174-a001-d5e7496b93f6","Type":"ContainerStarted","Data":"b2cfa45745aa06ac7127dd60d3649c3c72c9caa3a06c7d35f2a6cc765321d572"} Jun 06 10:43:01 crc kubenswrapper[4911]: I0606 10:43:01.745664 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-8c5n5"] Jun 06 10:43:01 crc kubenswrapper[4911]: I0606 10:43:01.748692 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-8c5n5" Jun 06 10:43:01 crc kubenswrapper[4911]: I0606 10:43:01.810132 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94rcq\" (UniqueName: \"kubernetes.io/projected/43c9c4f1-7999-4d6d-a788-af41b3081d82-kube-api-access-94rcq\") pod \"crc-debug-8c5n5\" (UID: \"43c9c4f1-7999-4d6d-a788-af41b3081d82\") " pod="openstack/crc-debug-8c5n5" Jun 06 10:43:01 crc kubenswrapper[4911]: I0606 10:43:01.810396 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43c9c4f1-7999-4d6d-a788-af41b3081d82-host\") pod \"crc-debug-8c5n5\" (UID: \"43c9c4f1-7999-4d6d-a788-af41b3081d82\") " pod="openstack/crc-debug-8c5n5" Jun 06 10:43:01 crc kubenswrapper[4911]: I0606 10:43:01.914183 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43c9c4f1-7999-4d6d-a788-af41b3081d82-host\") pod \"crc-debug-8c5n5\" (UID: \"43c9c4f1-7999-4d6d-a788-af41b3081d82\") " pod="openstack/crc-debug-8c5n5" Jun 06 10:43:01 crc kubenswrapper[4911]: I0606 10:43:01.914296 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43c9c4f1-7999-4d6d-a788-af41b3081d82-host\") pod \"crc-debug-8c5n5\" (UID: \"43c9c4f1-7999-4d6d-a788-af41b3081d82\") " pod="openstack/crc-debug-8c5n5" Jun 06 10:43:01 crc kubenswrapper[4911]: I0606 10:43:01.914414 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94rcq\" (UniqueName: \"kubernetes.io/projected/43c9c4f1-7999-4d6d-a788-af41b3081d82-kube-api-access-94rcq\") pod \"crc-debug-8c5n5\" (UID: \"43c9c4f1-7999-4d6d-a788-af41b3081d82\") " pod="openstack/crc-debug-8c5n5" Jun 06 10:43:01 crc kubenswrapper[4911]: I0606 10:43:01.936841 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94rcq\" (UniqueName: \"kubernetes.io/projected/43c9c4f1-7999-4d6d-a788-af41b3081d82-kube-api-access-94rcq\") pod \"crc-debug-8c5n5\" (UID: \"43c9c4f1-7999-4d6d-a788-af41b3081d82\") " pod="openstack/crc-debug-8c5n5" Jun 06 10:43:01 crc kubenswrapper[4911]: I0606 10:43:01.940706 4911 generic.go:334] "Generic (PLEG): container finished" podID="55df3ca8-a98f-4174-a001-d5e7496b93f6" containerID="8710b066cfc9cd02fe16622b6af42df69d8cef38647e3b02293c48cac3491401" exitCode=0 Jun 06 10:43:01 crc kubenswrapper[4911]: I0606 10:43:01.940790 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqmb9" event={"ID":"55df3ca8-a98f-4174-a001-d5e7496b93f6","Type":"ContainerDied","Data":"8710b066cfc9cd02fe16622b6af42df69d8cef38647e3b02293c48cac3491401"} Jun 06 10:43:02 crc kubenswrapper[4911]: I0606 10:43:02.085148 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-8c5n5" Jun 06 10:43:02 crc kubenswrapper[4911]: W0606 10:43:02.141646 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43c9c4f1_7999_4d6d_a788_af41b3081d82.slice/crio-4aa25e6ecf81b32f81d333d3531e5463022f4b840813b67fb609680ad85913da WatchSource:0}: Error finding container 4aa25e6ecf81b32f81d333d3531e5463022f4b840813b67fb609680ad85913da: Status 404 returned error can't find the container with id 4aa25e6ecf81b32f81d333d3531e5463022f4b840813b67fb609680ad85913da Jun 06 10:43:02 crc kubenswrapper[4911]: I0606 10:43:02.952391 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-8c5n5" event={"ID":"43c9c4f1-7999-4d6d-a788-af41b3081d82","Type":"ContainerStarted","Data":"8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571"} Jun 06 10:43:02 crc kubenswrapper[4911]: I0606 10:43:02.952954 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-8c5n5" event={"ID":"43c9c4f1-7999-4d6d-a788-af41b3081d82","Type":"ContainerStarted","Data":"4aa25e6ecf81b32f81d333d3531e5463022f4b840813b67fb609680ad85913da"} Jun 06 10:43:02 crc kubenswrapper[4911]: I0606 10:43:02.976927 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-8c5n5" podStartSLOduration=1.9769001689999999 podStartE2EDuration="1.976900169s" podCreationTimestamp="2025-06-06 10:43:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:43:02.967606749 +0000 UTC m=+5394.243032312" watchObservedRunningTime="2025-06-06 10:43:02.976900169 +0000 UTC m=+5394.252325712" Jun 06 10:43:03 crc kubenswrapper[4911]: I0606 10:43:03.967531 4911 generic.go:334] "Generic (PLEG): container finished" podID="55df3ca8-a98f-4174-a001-d5e7496b93f6" containerID="ea64a0745bd7f8c1018fedda77b0f8a4e6b4526554682797a6e07d8c69d33183" exitCode=0 Jun 06 10:43:03 crc kubenswrapper[4911]: I0606 10:43:03.967642 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqmb9" event={"ID":"55df3ca8-a98f-4174-a001-d5e7496b93f6","Type":"ContainerDied","Data":"ea64a0745bd7f8c1018fedda77b0f8a4e6b4526554682797a6e07d8c69d33183"} Jun 06 10:43:04 crc kubenswrapper[4911]: I0606 10:43:04.982342 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqmb9" event={"ID":"55df3ca8-a98f-4174-a001-d5e7496b93f6","Type":"ContainerStarted","Data":"d152881f8b99313127242ec8ebfe146d347ac5204630a7bc21cf06eec3488f03"} Jun 06 10:43:05 crc kubenswrapper[4911]: I0606 10:43:05.035502 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nqmb9" podStartSLOduration=3.415792489 podStartE2EDuration="6.035471046s" podCreationTimestamp="2025-06-06 10:42:59 +0000 UTC" firstStartedPulling="2025-06-06 10:43:01.944352993 +0000 UTC m=+5393.219778536" lastFinishedPulling="2025-06-06 10:43:04.56403155 +0000 UTC m=+5395.839457093" observedRunningTime="2025-06-06 10:43:05.029205234 +0000 UTC m=+5396.304630777" watchObservedRunningTime="2025-06-06 10:43:05.035471046 +0000 UTC m=+5396.310896589" Jun 06 10:43:10 crc kubenswrapper[4911]: I0606 10:43:10.051876 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:43:10 crc kubenswrapper[4911]: I0606 10:43:10.052840 4911 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:43:10 crc kubenswrapper[4911]: I0606 10:43:10.127902 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:43:11 crc kubenswrapper[4911]: I0606 10:43:11.099945 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:43:11 crc kubenswrapper[4911]: I0606 10:43:11.166486 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqmb9"] Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.070210 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nqmb9" podUID="55df3ca8-a98f-4174-a001-d5e7496b93f6" containerName="registry-server" containerID="cri-o://d152881f8b99313127242ec8ebfe146d347ac5204630a7bc21cf06eec3488f03" gracePeriod=2 Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.373673 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-8c5n5"] Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.374008 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-8c5n5" podUID="43c9c4f1-7999-4d6d-a788-af41b3081d82" containerName="container-00" containerID="cri-o://8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571" gracePeriod=2 Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.392675 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-8c5n5"] Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.656733 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-8c5n5" Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.701539 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94rcq\" (UniqueName: \"kubernetes.io/projected/43c9c4f1-7999-4d6d-a788-af41b3081d82-kube-api-access-94rcq\") pod \"43c9c4f1-7999-4d6d-a788-af41b3081d82\" (UID: \"43c9c4f1-7999-4d6d-a788-af41b3081d82\") " Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.702079 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43c9c4f1-7999-4d6d-a788-af41b3081d82-host\") pod \"43c9c4f1-7999-4d6d-a788-af41b3081d82\" (UID: \"43c9c4f1-7999-4d6d-a788-af41b3081d82\") " Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.702160 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43c9c4f1-7999-4d6d-a788-af41b3081d82-host" (OuterVolumeSpecName: "host") pod "43c9c4f1-7999-4d6d-a788-af41b3081d82" (UID: "43c9c4f1-7999-4d6d-a788-af41b3081d82"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.702951 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43c9c4f1-7999-4d6d-a788-af41b3081d82-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.713286 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43c9c4f1-7999-4d6d-a788-af41b3081d82-kube-api-access-94rcq" (OuterVolumeSpecName: "kube-api-access-94rcq") pod "43c9c4f1-7999-4d6d-a788-af41b3081d82" (UID: "43c9c4f1-7999-4d6d-a788-af41b3081d82"). InnerVolumeSpecName "kube-api-access-94rcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.805036 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94rcq\" (UniqueName: \"kubernetes.io/projected/43c9c4f1-7999-4d6d-a788-af41b3081d82-kube-api-access-94rcq\") on node \"crc\" DevicePath \"\"" Jun 06 10:43:13 crc kubenswrapper[4911]: I0606 10:43:13.965145 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43c9c4f1-7999-4d6d-a788-af41b3081d82" path="/var/lib/kubelet/pods/43c9c4f1-7999-4d6d-a788-af41b3081d82/volumes" Jun 06 10:43:14 crc kubenswrapper[4911]: I0606 10:43:14.085348 4911 generic.go:334] "Generic (PLEG): container finished" podID="43c9c4f1-7999-4d6d-a788-af41b3081d82" containerID="8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571" exitCode=0 Jun 06 10:43:14 crc kubenswrapper[4911]: I0606 10:43:14.085480 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-8c5n5" Jun 06 10:43:14 crc kubenswrapper[4911]: I0606 10:43:14.085734 4911 scope.go:117] "RemoveContainer" containerID="8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571" Jun 06 10:43:14 crc kubenswrapper[4911]: I0606 10:43:14.099606 4911 generic.go:334] "Generic (PLEG): container finished" podID="55df3ca8-a98f-4174-a001-d5e7496b93f6" containerID="d152881f8b99313127242ec8ebfe146d347ac5204630a7bc21cf06eec3488f03" exitCode=0 Jun 06 10:43:14 crc kubenswrapper[4911]: I0606 10:43:14.099689 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqmb9" event={"ID":"55df3ca8-a98f-4174-a001-d5e7496b93f6","Type":"ContainerDied","Data":"d152881f8b99313127242ec8ebfe146d347ac5204630a7bc21cf06eec3488f03"} Jun 06 10:43:14 crc kubenswrapper[4911]: I0606 10:43:14.119542 4911 scope.go:117] "RemoveContainer" containerID="8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571" Jun 06 10:43:14 crc kubenswrapper[4911]: E0606 10:43:14.120376 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571\": container with ID starting with 8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571 not found: ID does not exist" containerID="8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571" Jun 06 10:43:14 crc kubenswrapper[4911]: I0606 10:43:14.120463 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571"} err="failed to get container status \"8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571\": rpc error: code = NotFound desc = could not find container 
\"8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571\": container with ID starting with 8e45a8740a60baa9f4956b4dad2fa2f2c1844032b8a9eb37240d4b1220c5a571 not found: ID does not exist" Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.115426 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqmb9" event={"ID":"55df3ca8-a98f-4174-a001-d5e7496b93f6","Type":"ContainerDied","Data":"b2cfa45745aa06ac7127dd60d3649c3c72c9caa3a06c7d35f2a6cc765321d572"} Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.115785 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2cfa45745aa06ac7127dd60d3649c3c72c9caa3a06c7d35f2a6cc765321d572" Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.179572 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.240924 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-catalog-content\") pod \"55df3ca8-a98f-4174-a001-d5e7496b93f6\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.241019 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp5dq\" (UniqueName: \"kubernetes.io/projected/55df3ca8-a98f-4174-a001-d5e7496b93f6-kube-api-access-gp5dq\") pod \"55df3ca8-a98f-4174-a001-d5e7496b93f6\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.241157 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-utilities\") pod \"55df3ca8-a98f-4174-a001-d5e7496b93f6\" (UID: \"55df3ca8-a98f-4174-a001-d5e7496b93f6\") " Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.242692 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-utilities" (OuterVolumeSpecName: "utilities") pod "55df3ca8-a98f-4174-a001-d5e7496b93f6" (UID: "55df3ca8-a98f-4174-a001-d5e7496b93f6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.249800 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55df3ca8-a98f-4174-a001-d5e7496b93f6-kube-api-access-gp5dq" (OuterVolumeSpecName: "kube-api-access-gp5dq") pod "55df3ca8-a98f-4174-a001-d5e7496b93f6" (UID: "55df3ca8-a98f-4174-a001-d5e7496b93f6"). InnerVolumeSpecName "kube-api-access-gp5dq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.252506 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "55df3ca8-a98f-4174-a001-d5e7496b93f6" (UID: "55df3ca8-a98f-4174-a001-d5e7496b93f6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.344753 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.344807 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp5dq\" (UniqueName: \"kubernetes.io/projected/55df3ca8-a98f-4174-a001-d5e7496b93f6-kube-api-access-gp5dq\") on node \"crc\" DevicePath \"\"" Jun 06 10:43:15 crc kubenswrapper[4911]: I0606 10:43:15.344822 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55df3ca8-a98f-4174-a001-d5e7496b93f6-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:43:16 crc kubenswrapper[4911]: I0606 10:43:16.125681 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqmb9" Jun 06 10:43:16 crc kubenswrapper[4911]: I0606 10:43:16.170040 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqmb9"] Jun 06 10:43:16 crc kubenswrapper[4911]: I0606 10:43:16.181724 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqmb9"] Jun 06 10:43:17 crc kubenswrapper[4911]: I0606 10:43:17.963155 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55df3ca8-a98f-4174-a001-d5e7496b93f6" path="/var/lib/kubelet/pods/55df3ca8-a98f-4174-a001-d5e7496b93f6/volumes" Jun 06 10:43:54 crc kubenswrapper[4911]: I0606 10:43:54.301845 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:43:54 crc kubenswrapper[4911]: I0606 10:43:54.302863 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:44:01 crc kubenswrapper[4911]: I0606 10:44:01.863639 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-ts47t"] Jun 06 10:44:01 crc kubenswrapper[4911]: E0606 10:44:01.865037 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c9c4f1-7999-4d6d-a788-af41b3081d82" containerName="container-00" Jun 06 10:44:01 crc kubenswrapper[4911]: I0606 10:44:01.865057 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c9c4f1-7999-4d6d-a788-af41b3081d82" containerName="container-00" Jun 06 10:44:01 crc kubenswrapper[4911]: E0606 10:44:01.865072 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55df3ca8-a98f-4174-a001-d5e7496b93f6" containerName="extract-utilities" Jun 06 10:44:01 crc kubenswrapper[4911]: I0606 10:44:01.865079 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="55df3ca8-a98f-4174-a001-d5e7496b93f6" containerName="extract-utilities" Jun 06 10:44:01 crc kubenswrapper[4911]: E0606 10:44:01.865107 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55df3ca8-a98f-4174-a001-d5e7496b93f6" containerName="registry-server" Jun 
06 10:44:01 crc kubenswrapper[4911]: I0606 10:44:01.865114 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="55df3ca8-a98f-4174-a001-d5e7496b93f6" containerName="registry-server" Jun 06 10:44:01 crc kubenswrapper[4911]: E0606 10:44:01.865130 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55df3ca8-a98f-4174-a001-d5e7496b93f6" containerName="extract-content" Jun 06 10:44:01 crc kubenswrapper[4911]: I0606 10:44:01.865136 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="55df3ca8-a98f-4174-a001-d5e7496b93f6" containerName="extract-content" Jun 06 10:44:01 crc kubenswrapper[4911]: I0606 10:44:01.865403 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="55df3ca8-a98f-4174-a001-d5e7496b93f6" containerName="registry-server" Jun 06 10:44:01 crc kubenswrapper[4911]: I0606 10:44:01.865418 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c9c4f1-7999-4d6d-a788-af41b3081d82" containerName="container-00" Jun 06 10:44:01 crc kubenswrapper[4911]: I0606 10:44:01.866261 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-ts47t" Jun 06 10:44:02 crc kubenswrapper[4911]: I0606 10:44:02.036845 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6812de37-31d5-48d4-9df2-e080453538d4-host\") pod \"crc-debug-ts47t\" (UID: \"6812de37-31d5-48d4-9df2-e080453538d4\") " pod="openstack/crc-debug-ts47t" Jun 06 10:44:02 crc kubenswrapper[4911]: I0606 10:44:02.036964 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9x6r\" (UniqueName: \"kubernetes.io/projected/6812de37-31d5-48d4-9df2-e080453538d4-kube-api-access-c9x6r\") pod \"crc-debug-ts47t\" (UID: \"6812de37-31d5-48d4-9df2-e080453538d4\") " pod="openstack/crc-debug-ts47t" Jun 06 10:44:02 crc kubenswrapper[4911]: I0606 10:44:02.139405 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9x6r\" (UniqueName: \"kubernetes.io/projected/6812de37-31d5-48d4-9df2-e080453538d4-kube-api-access-c9x6r\") pod \"crc-debug-ts47t\" (UID: \"6812de37-31d5-48d4-9df2-e080453538d4\") " pod="openstack/crc-debug-ts47t" Jun 06 10:44:02 crc kubenswrapper[4911]: I0606 10:44:02.139639 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6812de37-31d5-48d4-9df2-e080453538d4-host\") pod \"crc-debug-ts47t\" (UID: \"6812de37-31d5-48d4-9df2-e080453538d4\") " pod="openstack/crc-debug-ts47t" Jun 06 10:44:02 crc kubenswrapper[4911]: I0606 10:44:02.139844 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6812de37-31d5-48d4-9df2-e080453538d4-host\") pod \"crc-debug-ts47t\" (UID: \"6812de37-31d5-48d4-9df2-e080453538d4\") " pod="openstack/crc-debug-ts47t" Jun 06 10:44:02 crc kubenswrapper[4911]: I0606 10:44:02.162302 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9x6r\" (UniqueName: \"kubernetes.io/projected/6812de37-31d5-48d4-9df2-e080453538d4-kube-api-access-c9x6r\") pod \"crc-debug-ts47t\" (UID: \"6812de37-31d5-48d4-9df2-e080453538d4\") " pod="openstack/crc-debug-ts47t" Jun 06 10:44:02 crc kubenswrapper[4911]: I0606 10:44:02.191568 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-ts47t" Jun 06 10:44:02 crc kubenswrapper[4911]: I0606 10:44:02.660960 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-ts47t" event={"ID":"6812de37-31d5-48d4-9df2-e080453538d4","Type":"ContainerStarted","Data":"fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6"} Jun 06 10:44:02 crc kubenswrapper[4911]: I0606 10:44:02.661486 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-ts47t" event={"ID":"6812de37-31d5-48d4-9df2-e080453538d4","Type":"ContainerStarted","Data":"8e47a19fcd3081a7831562dccb9a778284a4dd997c46cc34a64869cf0c35ce01"} Jun 06 10:44:02 crc kubenswrapper[4911]: I0606 10:44:02.677590 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-ts47t" podStartSLOduration=1.67757256 podStartE2EDuration="1.67757256s" podCreationTimestamp="2025-06-06 10:44:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:44:02.676600745 +0000 UTC m=+5453.952026308" watchObservedRunningTime="2025-06-06 10:44:02.67757256 +0000 UTC m=+5453.952998093" Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.069489 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-ts47t"] Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.075579 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-ts47t" podUID="6812de37-31d5-48d4-9df2-e080453538d4" containerName="container-00" containerID="cri-o://fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6" gracePeriod=2 Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.077462 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-ts47t"] Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.206624 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-ts47t" Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.245267 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6812de37-31d5-48d4-9df2-e080453538d4-host\") pod \"6812de37-31d5-48d4-9df2-e080453538d4\" (UID: \"6812de37-31d5-48d4-9df2-e080453538d4\") " Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.245871 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9x6r\" (UniqueName: \"kubernetes.io/projected/6812de37-31d5-48d4-9df2-e080453538d4-kube-api-access-c9x6r\") pod \"6812de37-31d5-48d4-9df2-e080453538d4\" (UID: \"6812de37-31d5-48d4-9df2-e080453538d4\") " Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.245478 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6812de37-31d5-48d4-9df2-e080453538d4-host" (OuterVolumeSpecName: "host") pod "6812de37-31d5-48d4-9df2-e080453538d4" (UID: "6812de37-31d5-48d4-9df2-e080453538d4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.254977 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6812de37-31d5-48d4-9df2-e080453538d4-kube-api-access-c9x6r" (OuterVolumeSpecName: "kube-api-access-c9x6r") pod "6812de37-31d5-48d4-9df2-e080453538d4" (UID: "6812de37-31d5-48d4-9df2-e080453538d4"). 
InnerVolumeSpecName "kube-api-access-c9x6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.349214 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6812de37-31d5-48d4-9df2-e080453538d4-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.349262 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9x6r\" (UniqueName: \"kubernetes.io/projected/6812de37-31d5-48d4-9df2-e080453538d4-kube-api-access-c9x6r\") on node \"crc\" DevicePath \"\"" Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.774890 4911 generic.go:334] "Generic (PLEG): container finished" podID="6812de37-31d5-48d4-9df2-e080453538d4" containerID="fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6" exitCode=0 Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.774970 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-ts47t" Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.774988 4911 scope.go:117] "RemoveContainer" containerID="fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6" Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.807530 4911 scope.go:117] "RemoveContainer" containerID="fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6" Jun 06 10:44:13 crc kubenswrapper[4911]: E0606 10:44:13.808116 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6\": container with ID starting with fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6 not found: ID does not exist" containerID="fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6" Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.808159 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6"} err="failed to get container status \"fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6\": rpc error: code = NotFound desc = could not find container \"fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6\": container with ID starting with fe118e95ace7e98d2dc3f59ed25ff45612229f06771858a64fea8b04c8d573d6 not found: ID does not exist" Jun 06 10:44:13 crc kubenswrapper[4911]: I0606 10:44:13.963809 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6812de37-31d5-48d4-9df2-e080453538d4" path="/var/lib/kubelet/pods/6812de37-31d5-48d4-9df2-e080453538d4/volumes" Jun 06 10:44:24 crc kubenswrapper[4911]: I0606 10:44:24.300976 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:44:24 crc kubenswrapper[4911]: I0606 10:44:24.302026 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:44:54 crc kubenswrapper[4911]: I0606 10:44:54.300003 4911 
patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:44:54 crc kubenswrapper[4911]: I0606 10:44:54.300950 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:44:54 crc kubenswrapper[4911]: I0606 10:44:54.301027 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 10:44:54 crc kubenswrapper[4911]: I0606 10:44:54.302256 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ad549db7e80199891d6397a0a77c646ef8025ceab1a43370ad30a9aaeb95578d"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 10:44:54 crc kubenswrapper[4911]: I0606 10:44:54.302361 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://ad549db7e80199891d6397a0a77c646ef8025ceab1a43370ad30a9aaeb95578d" gracePeriod=600 Jun 06 10:44:55 crc kubenswrapper[4911]: I0606 10:44:55.226885 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="ad549db7e80199891d6397a0a77c646ef8025ceab1a43370ad30a9aaeb95578d" exitCode=0 Jun 06 10:44:55 crc kubenswrapper[4911]: I0606 10:44:55.226992 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"ad549db7e80199891d6397a0a77c646ef8025ceab1a43370ad30a9aaeb95578d"} Jun 06 10:44:55 crc kubenswrapper[4911]: I0606 10:44:55.227811 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c"} Jun 06 10:44:55 crc kubenswrapper[4911]: I0606 10:44:55.227848 4911 scope.go:117] "RemoveContainer" containerID="5ff68d6fee639dc01dcef2fae65a9d24db54ab4d65941eaa72efad32c8cc4d39" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.159018 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4"] Jun 06 10:45:00 crc kubenswrapper[4911]: E0606 10:45:00.160354 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6812de37-31d5-48d4-9df2-e080453538d4" containerName="container-00" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.160375 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="6812de37-31d5-48d4-9df2-e080453538d4" containerName="container-00" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.160657 4911 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6812de37-31d5-48d4-9df2-e080453538d4" containerName="container-00" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.161743 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.165159 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.165565 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.173715 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4"] Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.233823 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe0beca2-26f5-4741-8039-c412d7f3484e-config-volume\") pod \"collect-profiles-29153445-5rxq4\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.233946 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwhfz\" (UniqueName: \"kubernetes.io/projected/fe0beca2-26f5-4741-8039-c412d7f3484e-kube-api-access-dwhfz\") pod \"collect-profiles-29153445-5rxq4\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.233971 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe0beca2-26f5-4741-8039-c412d7f3484e-secret-volume\") pod \"collect-profiles-29153445-5rxq4\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.336259 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe0beca2-26f5-4741-8039-c412d7f3484e-config-volume\") pod \"collect-profiles-29153445-5rxq4\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.336382 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwhfz\" (UniqueName: \"kubernetes.io/projected/fe0beca2-26f5-4741-8039-c412d7f3484e-kube-api-access-dwhfz\") pod \"collect-profiles-29153445-5rxq4\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.336421 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe0beca2-26f5-4741-8039-c412d7f3484e-secret-volume\") pod \"collect-profiles-29153445-5rxq4\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.337727 
4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe0beca2-26f5-4741-8039-c412d7f3484e-config-volume\") pod \"collect-profiles-29153445-5rxq4\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.345437 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe0beca2-26f5-4741-8039-c412d7f3484e-secret-volume\") pod \"collect-profiles-29153445-5rxq4\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.357887 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwhfz\" (UniqueName: \"kubernetes.io/projected/fe0beca2-26f5-4741-8039-c412d7f3484e-kube-api-access-dwhfz\") pod \"collect-profiles-29153445-5rxq4\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:00 crc kubenswrapper[4911]: I0606 10:45:00.491168 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:01 crc kubenswrapper[4911]: I0606 10:45:01.288515 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4"] Jun 06 10:45:01 crc kubenswrapper[4911]: I0606 10:45:01.293232 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" event={"ID":"fe0beca2-26f5-4741-8039-c412d7f3484e","Type":"ContainerStarted","Data":"de29b8ff652fd25a2c338a1fa22774d41b17a21a84120cd0060bfce921bdc8c4"} Jun 06 10:45:01 crc kubenswrapper[4911]: I0606 10:45:01.507805 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-qdl64"] Jun 06 10:45:01 crc kubenswrapper[4911]: I0606 10:45:01.510062 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-qdl64" Jun 06 10:45:01 crc kubenswrapper[4911]: I0606 10:45:01.569995 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9a6492cb-2ae5-4a10-a94b-b4e98984e838-host\") pod \"crc-debug-qdl64\" (UID: \"9a6492cb-2ae5-4a10-a94b-b4e98984e838\") " pod="openstack/crc-debug-qdl64" Jun 06 10:45:01 crc kubenswrapper[4911]: I0606 10:45:01.570114 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx5t5\" (UniqueName: \"kubernetes.io/projected/9a6492cb-2ae5-4a10-a94b-b4e98984e838-kube-api-access-qx5t5\") pod \"crc-debug-qdl64\" (UID: \"9a6492cb-2ae5-4a10-a94b-b4e98984e838\") " pod="openstack/crc-debug-qdl64" Jun 06 10:45:01 crc kubenswrapper[4911]: I0606 10:45:01.672590 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9a6492cb-2ae5-4a10-a94b-b4e98984e838-host\") pod \"crc-debug-qdl64\" (UID: \"9a6492cb-2ae5-4a10-a94b-b4e98984e838\") " pod="openstack/crc-debug-qdl64" Jun 06 10:45:01 crc kubenswrapper[4911]: I0606 10:45:01.672669 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9a6492cb-2ae5-4a10-a94b-b4e98984e838-host\") pod \"crc-debug-qdl64\" (UID: \"9a6492cb-2ae5-4a10-a94b-b4e98984e838\") " pod="openstack/crc-debug-qdl64" Jun 06 10:45:01 crc kubenswrapper[4911]: I0606 10:45:01.672729 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx5t5\" (UniqueName: \"kubernetes.io/projected/9a6492cb-2ae5-4a10-a94b-b4e98984e838-kube-api-access-qx5t5\") pod \"crc-debug-qdl64\" (UID: \"9a6492cb-2ae5-4a10-a94b-b4e98984e838\") " pod="openstack/crc-debug-qdl64" Jun 06 10:45:02 crc kubenswrapper[4911]: I0606 10:45:02.139084 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx5t5\" (UniqueName: \"kubernetes.io/projected/9a6492cb-2ae5-4a10-a94b-b4e98984e838-kube-api-access-qx5t5\") pod \"crc-debug-qdl64\" (UID: \"9a6492cb-2ae5-4a10-a94b-b4e98984e838\") " pod="openstack/crc-debug-qdl64" Jun 06 10:45:02 crc kubenswrapper[4911]: I0606 10:45:02.306994 4911 generic.go:334] "Generic (PLEG): container finished" podID="fe0beca2-26f5-4741-8039-c412d7f3484e" containerID="43247031b2dfd0194cf6634c0ada7529402b511d78467793f5ef80ae4f295a43" exitCode=0 Jun 06 10:45:02 crc kubenswrapper[4911]: I0606 10:45:02.307121 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" event={"ID":"fe0beca2-26f5-4741-8039-c412d7f3484e","Type":"ContainerDied","Data":"43247031b2dfd0194cf6634c0ada7529402b511d78467793f5ef80ae4f295a43"} Jun 06 10:45:02 crc kubenswrapper[4911]: I0606 10:45:02.436053 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-qdl64" Jun 06 10:45:02 crc kubenswrapper[4911]: W0606 10:45:02.477884 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a6492cb_2ae5_4a10_a94b_b4e98984e838.slice/crio-0c708223e000c1064937649989043de33cbdb23a388c79c87156b08ee3e9f59e WatchSource:0}: Error finding container 0c708223e000c1064937649989043de33cbdb23a388c79c87156b08ee3e9f59e: Status 404 returned error can't find the container with id 0c708223e000c1064937649989043de33cbdb23a388c79c87156b08ee3e9f59e Jun 06 10:45:03 crc kubenswrapper[4911]: I0606 10:45:03.324857 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-qdl64" event={"ID":"9a6492cb-2ae5-4a10-a94b-b4e98984e838","Type":"ContainerStarted","Data":"3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968"} Jun 06 10:45:03 crc kubenswrapper[4911]: I0606 10:45:03.325448 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-qdl64" event={"ID":"9a6492cb-2ae5-4a10-a94b-b4e98984e838","Type":"ContainerStarted","Data":"0c708223e000c1064937649989043de33cbdb23a388c79c87156b08ee3e9f59e"} Jun 06 10:45:03 crc kubenswrapper[4911]: I0606 10:45:03.347604 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-qdl64" podStartSLOduration=2.3475745359999998 podStartE2EDuration="2.347574536s" podCreationTimestamp="2025-06-06 10:45:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:45:03.344610529 +0000 UTC m=+5514.620036083" watchObservedRunningTime="2025-06-06 10:45:03.347574536 +0000 UTC m=+5514.623000089" Jun 06 10:45:04 crc kubenswrapper[4911]: I0606 10:45:04.869429 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:04 crc kubenswrapper[4911]: I0606 10:45:04.950058 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe0beca2-26f5-4741-8039-c412d7f3484e-config-volume\") pod \"fe0beca2-26f5-4741-8039-c412d7f3484e\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " Jun 06 10:45:04 crc kubenswrapper[4911]: I0606 10:45:04.950375 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwhfz\" (UniqueName: \"kubernetes.io/projected/fe0beca2-26f5-4741-8039-c412d7f3484e-kube-api-access-dwhfz\") pod \"fe0beca2-26f5-4741-8039-c412d7f3484e\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " Jun 06 10:45:04 crc kubenswrapper[4911]: I0606 10:45:04.950669 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe0beca2-26f5-4741-8039-c412d7f3484e-secret-volume\") pod \"fe0beca2-26f5-4741-8039-c412d7f3484e\" (UID: \"fe0beca2-26f5-4741-8039-c412d7f3484e\") " Jun 06 10:45:04 crc kubenswrapper[4911]: I0606 10:45:04.952122 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe0beca2-26f5-4741-8039-c412d7f3484e-config-volume" (OuterVolumeSpecName: "config-volume") pod "fe0beca2-26f5-4741-8039-c412d7f3484e" (UID: "fe0beca2-26f5-4741-8039-c412d7f3484e"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 10:45:05 crc kubenswrapper[4911]: I0606 10:45:05.036205 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe0beca2-26f5-4741-8039-c412d7f3484e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fe0beca2-26f5-4741-8039-c412d7f3484e" (UID: "fe0beca2-26f5-4741-8039-c412d7f3484e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 10:45:05 crc kubenswrapper[4911]: I0606 10:45:05.036344 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe0beca2-26f5-4741-8039-c412d7f3484e-kube-api-access-dwhfz" (OuterVolumeSpecName: "kube-api-access-dwhfz") pod "fe0beca2-26f5-4741-8039-c412d7f3484e" (UID: "fe0beca2-26f5-4741-8039-c412d7f3484e"). InnerVolumeSpecName "kube-api-access-dwhfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:45:05 crc kubenswrapper[4911]: I0606 10:45:05.052971 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe0beca2-26f5-4741-8039-c412d7f3484e-secret-volume\") on node \"crc\" DevicePath \"\"" Jun 06 10:45:05 crc kubenswrapper[4911]: I0606 10:45:05.053020 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe0beca2-26f5-4741-8039-c412d7f3484e-config-volume\") on node \"crc\" DevicePath \"\"" Jun 06 10:45:05 crc kubenswrapper[4911]: I0606 10:45:05.053038 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwhfz\" (UniqueName: \"kubernetes.io/projected/fe0beca2-26f5-4741-8039-c412d7f3484e-kube-api-access-dwhfz\") on node \"crc\" DevicePath \"\"" Jun 06 10:45:05 crc kubenswrapper[4911]: I0606 10:45:05.347660 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" event={"ID":"fe0beca2-26f5-4741-8039-c412d7f3484e","Type":"ContainerDied","Data":"de29b8ff652fd25a2c338a1fa22774d41b17a21a84120cd0060bfce921bdc8c4"} Jun 06 10:45:05 crc kubenswrapper[4911]: I0606 10:45:05.347725 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de29b8ff652fd25a2c338a1fa22774d41b17a21a84120cd0060bfce921bdc8c4" Jun 06 10:45:05 crc kubenswrapper[4911]: I0606 10:45:05.347792 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153445-5rxq4" Jun 06 10:45:05 crc kubenswrapper[4911]: I0606 10:45:05.960384 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72"] Jun 06 10:45:05 crc kubenswrapper[4911]: I0606 10:45:05.971152 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153400-j9h72"] Jun 06 10:45:07 crc kubenswrapper[4911]: I0606 10:45:07.960655 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b46d4c1-85c3-4862-bd90-0a14073266a6" path="/var/lib/kubelet/pods/0b46d4c1-85c3-4862-bd90-0a14073266a6/volumes" Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.166564 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-qdl64"] Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.167852 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-qdl64" podUID="9a6492cb-2ae5-4a10-a94b-b4e98984e838" containerName="container-00" containerID="cri-o://3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968" gracePeriod=2 Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.180362 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-qdl64"] Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.272060 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-qdl64" Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.370125 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9a6492cb-2ae5-4a10-a94b-b4e98984e838-host\") pod \"9a6492cb-2ae5-4a10-a94b-b4e98984e838\" (UID: \"9a6492cb-2ae5-4a10-a94b-b4e98984e838\") " Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.370263 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qx5t5\" (UniqueName: \"kubernetes.io/projected/9a6492cb-2ae5-4a10-a94b-b4e98984e838-kube-api-access-qx5t5\") pod \"9a6492cb-2ae5-4a10-a94b-b4e98984e838\" (UID: \"9a6492cb-2ae5-4a10-a94b-b4e98984e838\") " Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.370251 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9a6492cb-2ae5-4a10-a94b-b4e98984e838-host" (OuterVolumeSpecName: "host") pod "9a6492cb-2ae5-4a10-a94b-b4e98984e838" (UID: "9a6492cb-2ae5-4a10-a94b-b4e98984e838"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.371106 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9a6492cb-2ae5-4a10-a94b-b4e98984e838-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.379858 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a6492cb-2ae5-4a10-a94b-b4e98984e838-kube-api-access-qx5t5" (OuterVolumeSpecName: "kube-api-access-qx5t5") pod "9a6492cb-2ae5-4a10-a94b-b4e98984e838" (UID: "9a6492cb-2ae5-4a10-a94b-b4e98984e838"). InnerVolumeSpecName "kube-api-access-qx5t5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.469763 4911 generic.go:334] "Generic (PLEG): container finished" podID="9a6492cb-2ae5-4a10-a94b-b4e98984e838" containerID="3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968" exitCode=0 Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.469839 4911 scope.go:117] "RemoveContainer" containerID="3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968" Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.469973 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-qdl64" Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.472636 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qx5t5\" (UniqueName: \"kubernetes.io/projected/9a6492cb-2ae5-4a10-a94b-b4e98984e838-kube-api-access-qx5t5\") on node \"crc\" DevicePath \"\"" Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.499150 4911 scope.go:117] "RemoveContainer" containerID="3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968" Jun 06 10:45:13 crc kubenswrapper[4911]: E0606 10:45:13.499657 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968\": container with ID starting with 3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968 not found: ID does not exist" containerID="3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968" Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.499698 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968"} err="failed to get container status \"3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968\": rpc error: code = NotFound desc = could not find container \"3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968\": container with ID starting with 3f3bfca8ff873e34438a6fdace5264fb5b8ea38d1ad4f38f51eb0f464e6e5968 not found: ID does not exist" Jun 06 10:45:13 crc kubenswrapper[4911]: I0606 10:45:13.967807 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a6492cb-2ae5-4a10-a94b-b4e98984e838" path="/var/lib/kubelet/pods/9a6492cb-2ae5-4a10-a94b-b4e98984e838/volumes" Jun 06 10:45:23 crc kubenswrapper[4911]: I0606 10:45:23.724220 4911 scope.go:117] "RemoveContainer" containerID="7e0f067cf978fd56498f13e97a89d210f27f22dafee73a508928716583c132cf" Jun 06 10:45:23 crc kubenswrapper[4911]: I0606 10:45:23.752339 4911 scope.go:117] "RemoveContainer" containerID="f9a1ef92d178a3c139f04916e144d1f0864d5ecf7ffa0c7e45a5a24e4765232d" Jun 06 10:45:23 crc kubenswrapper[4911]: I0606 10:45:23.784835 4911 scope.go:117] "RemoveContainer" containerID="b4669354bd6dca06c98943f7253818a318193239e4cb8d59b082555fa76c6b95" Jun 06 10:45:23 crc kubenswrapper[4911]: I0606 10:45:23.862513 4911 scope.go:117] "RemoveContainer" containerID="9d97be5c2c9d4f8a40763df820a94943421aec5ab21e1ebbdbee56e6059c0183" Jun 06 10:46:01 crc kubenswrapper[4911]: I0606 10:46:01.563918 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-75dp4"] Jun 06 10:46:01 crc kubenswrapper[4911]: E0606 10:46:01.565306 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe0beca2-26f5-4741-8039-c412d7f3484e" containerName="collect-profiles" Jun 06 10:46:01 crc kubenswrapper[4911]: I0606 
10:46:01.565324 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe0beca2-26f5-4741-8039-c412d7f3484e" containerName="collect-profiles" Jun 06 10:46:01 crc kubenswrapper[4911]: E0606 10:46:01.565378 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a6492cb-2ae5-4a10-a94b-b4e98984e838" containerName="container-00" Jun 06 10:46:01 crc kubenswrapper[4911]: I0606 10:46:01.565385 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a6492cb-2ae5-4a10-a94b-b4e98984e838" containerName="container-00" Jun 06 10:46:01 crc kubenswrapper[4911]: I0606 10:46:01.565590 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a6492cb-2ae5-4a10-a94b-b4e98984e838" containerName="container-00" Jun 06 10:46:01 crc kubenswrapper[4911]: I0606 10:46:01.565609 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe0beca2-26f5-4741-8039-c412d7f3484e" containerName="collect-profiles" Jun 06 10:46:01 crc kubenswrapper[4911]: I0606 10:46:01.566414 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-75dp4" Jun 06 10:46:01 crc kubenswrapper[4911]: I0606 10:46:01.681522 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csrgj\" (UniqueName: \"kubernetes.io/projected/4299d3fd-519c-45d7-bf72-b5f555c45981-kube-api-access-csrgj\") pod \"crc-debug-75dp4\" (UID: \"4299d3fd-519c-45d7-bf72-b5f555c45981\") " pod="openstack/crc-debug-75dp4" Jun 06 10:46:01 crc kubenswrapper[4911]: I0606 10:46:01.681647 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4299d3fd-519c-45d7-bf72-b5f555c45981-host\") pod \"crc-debug-75dp4\" (UID: \"4299d3fd-519c-45d7-bf72-b5f555c45981\") " pod="openstack/crc-debug-75dp4" Jun 06 10:46:01 crc kubenswrapper[4911]: I0606 10:46:01.784244 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csrgj\" (UniqueName: \"kubernetes.io/projected/4299d3fd-519c-45d7-bf72-b5f555c45981-kube-api-access-csrgj\") pod \"crc-debug-75dp4\" (UID: \"4299d3fd-519c-45d7-bf72-b5f555c45981\") " pod="openstack/crc-debug-75dp4" Jun 06 10:46:01 crc kubenswrapper[4911]: I0606 10:46:01.784584 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4299d3fd-519c-45d7-bf72-b5f555c45981-host\") pod \"crc-debug-75dp4\" (UID: \"4299d3fd-519c-45d7-bf72-b5f555c45981\") " pod="openstack/crc-debug-75dp4" Jun 06 10:46:01 crc kubenswrapper[4911]: I0606 10:46:01.784741 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4299d3fd-519c-45d7-bf72-b5f555c45981-host\") pod \"crc-debug-75dp4\" (UID: \"4299d3fd-519c-45d7-bf72-b5f555c45981\") " pod="openstack/crc-debug-75dp4" Jun 06 10:46:02 crc kubenswrapper[4911]: I0606 10:46:02.346043 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csrgj\" (UniqueName: \"kubernetes.io/projected/4299d3fd-519c-45d7-bf72-b5f555c45981-kube-api-access-csrgj\") pod \"crc-debug-75dp4\" (UID: \"4299d3fd-519c-45d7-bf72-b5f555c45981\") " pod="openstack/crc-debug-75dp4" Jun 06 10:46:02 crc kubenswrapper[4911]: I0606 10:46:02.495719 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-75dp4" Jun 06 10:46:03 crc kubenswrapper[4911]: I0606 10:46:03.051031 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-75dp4" event={"ID":"4299d3fd-519c-45d7-bf72-b5f555c45981","Type":"ContainerStarted","Data":"fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3"} Jun 06 10:46:03 crc kubenswrapper[4911]: I0606 10:46:03.051696 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-75dp4" event={"ID":"4299d3fd-519c-45d7-bf72-b5f555c45981","Type":"ContainerStarted","Data":"80e9c9383960e05b8a44f415e0b17900da3131ab58b2acc6b8be976576638c54"} Jun 06 10:46:03 crc kubenswrapper[4911]: I0606 10:46:03.074928 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-75dp4" podStartSLOduration=2.074889375 podStartE2EDuration="2.074889375s" podCreationTimestamp="2025-06-06 10:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:46:03.065897852 +0000 UTC m=+5574.341323395" watchObservedRunningTime="2025-06-06 10:46:03.074889375 +0000 UTC m=+5574.350314928" Jun 06 10:46:13 crc kubenswrapper[4911]: I0606 10:46:13.363744 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-75dp4"] Jun 06 10:46:13 crc kubenswrapper[4911]: I0606 10:46:13.364767 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-75dp4" podUID="4299d3fd-519c-45d7-bf72-b5f555c45981" containerName="container-00" containerID="cri-o://fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3" gracePeriod=2 Jun 06 10:46:13 crc kubenswrapper[4911]: I0606 10:46:13.376503 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-75dp4"] Jun 06 10:46:13 crc kubenswrapper[4911]: I0606 10:46:13.505198 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-75dp4" Jun 06 10:46:13 crc kubenswrapper[4911]: I0606 10:46:13.705043 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csrgj\" (UniqueName: \"kubernetes.io/projected/4299d3fd-519c-45d7-bf72-b5f555c45981-kube-api-access-csrgj\") pod \"4299d3fd-519c-45d7-bf72-b5f555c45981\" (UID: \"4299d3fd-519c-45d7-bf72-b5f555c45981\") " Jun 06 10:46:13 crc kubenswrapper[4911]: I0606 10:46:13.705194 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4299d3fd-519c-45d7-bf72-b5f555c45981-host\") pod \"4299d3fd-519c-45d7-bf72-b5f555c45981\" (UID: \"4299d3fd-519c-45d7-bf72-b5f555c45981\") " Jun 06 10:46:13 crc kubenswrapper[4911]: I0606 10:46:13.705358 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4299d3fd-519c-45d7-bf72-b5f555c45981-host" (OuterVolumeSpecName: "host") pod "4299d3fd-519c-45d7-bf72-b5f555c45981" (UID: "4299d3fd-519c-45d7-bf72-b5f555c45981"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:46:13 crc kubenswrapper[4911]: I0606 10:46:13.705813 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4299d3fd-519c-45d7-bf72-b5f555c45981-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:46:13 crc kubenswrapper[4911]: I0606 10:46:13.714342 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4299d3fd-519c-45d7-bf72-b5f555c45981-kube-api-access-csrgj" (OuterVolumeSpecName: "kube-api-access-csrgj") pod "4299d3fd-519c-45d7-bf72-b5f555c45981" (UID: "4299d3fd-519c-45d7-bf72-b5f555c45981"). InnerVolumeSpecName "kube-api-access-csrgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:46:13 crc kubenswrapper[4911]: I0606 10:46:13.808462 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csrgj\" (UniqueName: \"kubernetes.io/projected/4299d3fd-519c-45d7-bf72-b5f555c45981-kube-api-access-csrgj\") on node \"crc\" DevicePath \"\"" Jun 06 10:46:13 crc kubenswrapper[4911]: I0606 10:46:13.974632 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4299d3fd-519c-45d7-bf72-b5f555c45981" path="/var/lib/kubelet/pods/4299d3fd-519c-45d7-bf72-b5f555c45981/volumes" Jun 06 10:46:14 crc kubenswrapper[4911]: I0606 10:46:14.173352 4911 generic.go:334] "Generic (PLEG): container finished" podID="4299d3fd-519c-45d7-bf72-b5f555c45981" containerID="fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3" exitCode=0 Jun 06 10:46:14 crc kubenswrapper[4911]: I0606 10:46:14.173428 4911 scope.go:117] "RemoveContainer" containerID="fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3" Jun 06 10:46:14 crc kubenswrapper[4911]: I0606 10:46:14.173655 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-75dp4" Jun 06 10:46:14 crc kubenswrapper[4911]: I0606 10:46:14.202808 4911 scope.go:117] "RemoveContainer" containerID="fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3" Jun 06 10:46:14 crc kubenswrapper[4911]: E0606 10:46:14.204392 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3\": container with ID starting with fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3 not found: ID does not exist" containerID="fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3" Jun 06 10:46:14 crc kubenswrapper[4911]: I0606 10:46:14.204466 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3"} err="failed to get container status \"fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3\": rpc error: code = NotFound desc = could not find container \"fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3\": container with ID starting with fe9b50d7af1a422a4ccae18911898c4070f0a02ac963290e19a449a1505754d3 not found: ID does not exist" Jun 06 10:46:54 crc kubenswrapper[4911]: I0606 10:46:54.300237 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:46:54 crc kubenswrapper[4911]: I0606 10:46:54.300989 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:47:01 crc kubenswrapper[4911]: I0606 10:47:01.785085 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-pkjh8"] Jun 06 10:47:01 crc kubenswrapper[4911]: E0606 10:47:01.786614 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4299d3fd-519c-45d7-bf72-b5f555c45981" containerName="container-00" Jun 06 10:47:01 crc kubenswrapper[4911]: I0606 10:47:01.786634 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4299d3fd-519c-45d7-bf72-b5f555c45981" containerName="container-00" Jun 06 10:47:01 crc kubenswrapper[4911]: I0606 10:47:01.786860 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4299d3fd-519c-45d7-bf72-b5f555c45981" containerName="container-00" Jun 06 10:47:01 crc kubenswrapper[4911]: I0606 10:47:01.788032 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-pkjh8" Jun 06 10:47:01 crc kubenswrapper[4911]: I0606 10:47:01.980786 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-host\") pod \"crc-debug-pkjh8\" (UID: \"e9d6cff4-4ca9-41f8-b91f-26d4fba39338\") " pod="openstack/crc-debug-pkjh8" Jun 06 10:47:01 crc kubenswrapper[4911]: I0606 10:47:01.981323 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqpsg\" (UniqueName: \"kubernetes.io/projected/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-kube-api-access-pqpsg\") pod \"crc-debug-pkjh8\" (UID: \"e9d6cff4-4ca9-41f8-b91f-26d4fba39338\") " pod="openstack/crc-debug-pkjh8" Jun 06 10:47:02 crc kubenswrapper[4911]: I0606 10:47:02.084827 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqpsg\" (UniqueName: \"kubernetes.io/projected/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-kube-api-access-pqpsg\") pod \"crc-debug-pkjh8\" (UID: \"e9d6cff4-4ca9-41f8-b91f-26d4fba39338\") " pod="openstack/crc-debug-pkjh8" Jun 06 10:47:02 crc kubenswrapper[4911]: I0606 10:47:02.084971 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-host\") pod \"crc-debug-pkjh8\" (UID: \"e9d6cff4-4ca9-41f8-b91f-26d4fba39338\") " pod="openstack/crc-debug-pkjh8" Jun 06 10:47:02 crc kubenswrapper[4911]: I0606 10:47:02.085237 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-host\") pod \"crc-debug-pkjh8\" (UID: \"e9d6cff4-4ca9-41f8-b91f-26d4fba39338\") " pod="openstack/crc-debug-pkjh8" Jun 06 10:47:02 crc kubenswrapper[4911]: I0606 10:47:02.109537 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqpsg\" (UniqueName: \"kubernetes.io/projected/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-kube-api-access-pqpsg\") pod \"crc-debug-pkjh8\" (UID: \"e9d6cff4-4ca9-41f8-b91f-26d4fba39338\") " pod="openstack/crc-debug-pkjh8" Jun 06 10:47:02 crc kubenswrapper[4911]: I0606 10:47:02.115826 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-pkjh8" Jun 06 10:47:02 crc kubenswrapper[4911]: I0606 10:47:02.706557 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-pkjh8" event={"ID":"e9d6cff4-4ca9-41f8-b91f-26d4fba39338","Type":"ContainerStarted","Data":"fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6"} Jun 06 10:47:02 crc kubenswrapper[4911]: I0606 10:47:02.707103 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-pkjh8" event={"ID":"e9d6cff4-4ca9-41f8-b91f-26d4fba39338","Type":"ContainerStarted","Data":"cfb0ffd92162c5473e803fee57d9ae7a985f0b8c54b24c7cebcad5f45cc1684b"} Jun 06 10:47:02 crc kubenswrapper[4911]: I0606 10:47:02.725577 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-pkjh8" podStartSLOduration=1.7255529040000002 podStartE2EDuration="1.725552904s" podCreationTimestamp="2025-06-06 10:47:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:47:02.722467325 +0000 UTC m=+5633.997892878" watchObservedRunningTime="2025-06-06 10:47:02.725552904 +0000 UTC m=+5634.000978447" Jun 06 10:47:12 crc kubenswrapper[4911]: I0606 10:47:12.989795 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-pkjh8"] Jun 06 10:47:12 crc kubenswrapper[4911]: I0606 10:47:12.991002 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-pkjh8" podUID="e9d6cff4-4ca9-41f8-b91f-26d4fba39338" containerName="container-00" containerID="cri-o://fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6" gracePeriod=2 Jun 06 10:47:12 crc kubenswrapper[4911]: I0606 10:47:12.994272 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-pkjh8"] Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.089569 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-pkjh8" Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.275581 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-host\") pod \"e9d6cff4-4ca9-41f8-b91f-26d4fba39338\" (UID: \"e9d6cff4-4ca9-41f8-b91f-26d4fba39338\") " Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.275768 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqpsg\" (UniqueName: \"kubernetes.io/projected/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-kube-api-access-pqpsg\") pod \"e9d6cff4-4ca9-41f8-b91f-26d4fba39338\" (UID: \"e9d6cff4-4ca9-41f8-b91f-26d4fba39338\") " Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.275755 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-host" (OuterVolumeSpecName: "host") pod "e9d6cff4-4ca9-41f8-b91f-26d4fba39338" (UID: "e9d6cff4-4ca9-41f8-b91f-26d4fba39338"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.276771 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.284078 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-kube-api-access-pqpsg" (OuterVolumeSpecName: "kube-api-access-pqpsg") pod "e9d6cff4-4ca9-41f8-b91f-26d4fba39338" (UID: "e9d6cff4-4ca9-41f8-b91f-26d4fba39338"). InnerVolumeSpecName "kube-api-access-pqpsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.379805 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqpsg\" (UniqueName: \"kubernetes.io/projected/e9d6cff4-4ca9-41f8-b91f-26d4fba39338-kube-api-access-pqpsg\") on node \"crc\" DevicePath \"\"" Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.834973 4911 generic.go:334] "Generic (PLEG): container finished" podID="e9d6cff4-4ca9-41f8-b91f-26d4fba39338" containerID="fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6" exitCode=0 Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.835552 4911 scope.go:117] "RemoveContainer" containerID="fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6" Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.835769 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-pkjh8" Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.877703 4911 scope.go:117] "RemoveContainer" containerID="fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6" Jun 06 10:47:13 crc kubenswrapper[4911]: E0606 10:47:13.878273 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6\": container with ID starting with fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6 not found: ID does not exist" containerID="fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6" Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.878336 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6"} err="failed to get container status \"fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6\": rpc error: code = NotFound desc = could not find container \"fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6\": container with ID starting with fce4246fd16915090466e3548111713242435c13762dd4465060e28f542dd0c6 not found: ID does not exist" Jun 06 10:47:13 crc kubenswrapper[4911]: I0606 10:47:13.971174 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9d6cff4-4ca9-41f8-b91f-26d4fba39338" path="/var/lib/kubelet/pods/e9d6cff4-4ca9-41f8-b91f-26d4fba39338/volumes" Jun 06 10:47:24 crc kubenswrapper[4911]: I0606 10:47:24.300158 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:47:24 crc kubenswrapper[4911]: I0606 
10:47:24.301001 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:47:54 crc kubenswrapper[4911]: I0606 10:47:54.300146 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:47:54 crc kubenswrapper[4911]: I0606 10:47:54.301033 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:47:54 crc kubenswrapper[4911]: I0606 10:47:54.301130 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 10:47:54 crc kubenswrapper[4911]: I0606 10:47:54.302329 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 10:47:54 crc kubenswrapper[4911]: I0606 10:47:54.302392 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" gracePeriod=600 Jun 06 10:47:54 crc kubenswrapper[4911]: E0606 10:47:54.427579 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:47:55 crc kubenswrapper[4911]: I0606 10:47:55.310374 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" exitCode=0 Jun 06 10:47:55 crc kubenswrapper[4911]: I0606 10:47:55.310445 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c"} Jun 06 10:47:55 crc kubenswrapper[4911]: I0606 10:47:55.311771 4911 scope.go:117] "RemoveContainer" containerID="ad549db7e80199891d6397a0a77c646ef8025ceab1a43370ad30a9aaeb95578d" Jun 06 10:47:55 crc kubenswrapper[4911]: I0606 10:47:55.312811 4911 scope.go:117] "RemoveContainer" 
containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:47:55 crc kubenswrapper[4911]: E0606 10:47:55.313171 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:48:02 crc kubenswrapper[4911]: I0606 10:48:02.445078 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-j9dzs"] Jun 06 10:48:02 crc kubenswrapper[4911]: E0606 10:48:02.446772 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9d6cff4-4ca9-41f8-b91f-26d4fba39338" containerName="container-00" Jun 06 10:48:02 crc kubenswrapper[4911]: I0606 10:48:02.446798 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9d6cff4-4ca9-41f8-b91f-26d4fba39338" containerName="container-00" Jun 06 10:48:02 crc kubenswrapper[4911]: I0606 10:48:02.447082 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9d6cff4-4ca9-41f8-b91f-26d4fba39338" containerName="container-00" Jun 06 10:48:02 crc kubenswrapper[4911]: I0606 10:48:02.448486 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-j9dzs" Jun 06 10:48:02 crc kubenswrapper[4911]: I0606 10:48:02.607003 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfchv\" (UniqueName: \"kubernetes.io/projected/ed51f349-1527-436b-afc3-90b18fece2ac-kube-api-access-hfchv\") pod \"crc-debug-j9dzs\" (UID: \"ed51f349-1527-436b-afc3-90b18fece2ac\") " pod="openstack/crc-debug-j9dzs" Jun 06 10:48:02 crc kubenswrapper[4911]: I0606 10:48:02.607755 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed51f349-1527-436b-afc3-90b18fece2ac-host\") pod \"crc-debug-j9dzs\" (UID: \"ed51f349-1527-436b-afc3-90b18fece2ac\") " pod="openstack/crc-debug-j9dzs" Jun 06 10:48:02 crc kubenswrapper[4911]: I0606 10:48:02.710601 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed51f349-1527-436b-afc3-90b18fece2ac-host\") pod \"crc-debug-j9dzs\" (UID: \"ed51f349-1527-436b-afc3-90b18fece2ac\") " pod="openstack/crc-debug-j9dzs" Jun 06 10:48:02 crc kubenswrapper[4911]: I0606 10:48:02.710924 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfchv\" (UniqueName: \"kubernetes.io/projected/ed51f349-1527-436b-afc3-90b18fece2ac-kube-api-access-hfchv\") pod \"crc-debug-j9dzs\" (UID: \"ed51f349-1527-436b-afc3-90b18fece2ac\") " pod="openstack/crc-debug-j9dzs" Jun 06 10:48:02 crc kubenswrapper[4911]: I0606 10:48:02.711026 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed51f349-1527-436b-afc3-90b18fece2ac-host\") pod \"crc-debug-j9dzs\" (UID: \"ed51f349-1527-436b-afc3-90b18fece2ac\") " pod="openstack/crc-debug-j9dzs" Jun 06 10:48:02 crc kubenswrapper[4911]: I0606 10:48:02.735741 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfchv\" (UniqueName: 
\"kubernetes.io/projected/ed51f349-1527-436b-afc3-90b18fece2ac-kube-api-access-hfchv\") pod \"crc-debug-j9dzs\" (UID: \"ed51f349-1527-436b-afc3-90b18fece2ac\") " pod="openstack/crc-debug-j9dzs" Jun 06 10:48:02 crc kubenswrapper[4911]: I0606 10:48:02.785022 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-j9dzs" Jun 06 10:48:03 crc kubenswrapper[4911]: I0606 10:48:03.409824 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-j9dzs" event={"ID":"ed51f349-1527-436b-afc3-90b18fece2ac","Type":"ContainerStarted","Data":"aecee8523b44ec6a4cae0722d0ba54573106f875d0bdbbde035c453b87165991"} Jun 06 10:48:03 crc kubenswrapper[4911]: I0606 10:48:03.410369 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-j9dzs" event={"ID":"ed51f349-1527-436b-afc3-90b18fece2ac","Type":"ContainerStarted","Data":"1a4b27462b07cefe79527d1092f45690ba0cab64ca8494b6659e033499776238"} Jun 06 10:48:03 crc kubenswrapper[4911]: I0606 10:48:03.433442 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-j9dzs" podStartSLOduration=1.433413737 podStartE2EDuration="1.433413737s" podCreationTimestamp="2025-06-06 10:48:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:48:03.428168112 +0000 UTC m=+5694.703593655" watchObservedRunningTime="2025-06-06 10:48:03.433413737 +0000 UTC m=+5694.708839280" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.537921 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-smslj"] Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.543375 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.550001 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-smslj"] Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.591326 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxggc\" (UniqueName: \"kubernetes.io/projected/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-kube-api-access-hxggc\") pod \"community-operators-smslj\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.591496 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-utilities\") pod \"community-operators-smslj\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.591550 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-catalog-content\") pod \"community-operators-smslj\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.693569 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxggc\" (UniqueName: \"kubernetes.io/projected/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-kube-api-access-hxggc\") pod \"community-operators-smslj\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.693714 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-utilities\") pod \"community-operators-smslj\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.693745 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-catalog-content\") pod \"community-operators-smslj\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.694942 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-utilities\") pod \"community-operators-smslj\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.694965 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-catalog-content\") pod \"community-operators-smslj\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.717232 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hxggc\" (UniqueName: \"kubernetes.io/projected/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-kube-api-access-hxggc\") pod \"community-operators-smslj\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.883681 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:09 crc kubenswrapper[4911]: I0606 10:48:09.971520 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:48:09 crc kubenswrapper[4911]: E0606 10:48:09.971855 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:48:10 crc kubenswrapper[4911]: I0606 10:48:10.677027 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-smslj"] Jun 06 10:48:11 crc kubenswrapper[4911]: I0606 10:48:11.519445 4911 generic.go:334] "Generic (PLEG): container finished" podID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" containerID="919170b05c0708196173b6b421e832832ca8692ae4fbdbef41408bd5e7b4ee14" exitCode=0 Jun 06 10:48:11 crc kubenswrapper[4911]: I0606 10:48:11.519492 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smslj" event={"ID":"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07","Type":"ContainerDied","Data":"919170b05c0708196173b6b421e832832ca8692ae4fbdbef41408bd5e7b4ee14"} Jun 06 10:48:11 crc kubenswrapper[4911]: I0606 10:48:11.519843 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smslj" event={"ID":"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07","Type":"ContainerStarted","Data":"a940e1422eb4889ec4f50a027596c60af776b29e38972a083b650065ada9f49e"} Jun 06 10:48:11 crc kubenswrapper[4911]: I0606 10:48:11.522747 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 10:48:12 crc kubenswrapper[4911]: I0606 10:48:12.535471 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smslj" event={"ID":"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07","Type":"ContainerStarted","Data":"b3538ab694fd5f4029cbed8c18b09b2abda1760c09c4086bced31ffb00f3d7eb"} Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.454948 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-j9dzs"] Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.456079 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-j9dzs" podUID="ed51f349-1527-436b-afc3-90b18fece2ac" containerName="container-00" containerID="cri-o://aecee8523b44ec6a4cae0722d0ba54573106f875d0bdbbde035c453b87165991" gracePeriod=2 Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.464048 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-j9dzs"] Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.549406 4911 generic.go:334] "Generic (PLEG): container finished" podID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" 
containerID="b3538ab694fd5f4029cbed8c18b09b2abda1760c09c4086bced31ffb00f3d7eb" exitCode=0 Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.549508 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smslj" event={"ID":"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07","Type":"ContainerDied","Data":"b3538ab694fd5f4029cbed8c18b09b2abda1760c09c4086bced31ffb00f3d7eb"} Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.552254 4911 generic.go:334] "Generic (PLEG): container finished" podID="ed51f349-1527-436b-afc3-90b18fece2ac" containerID="aecee8523b44ec6a4cae0722d0ba54573106f875d0bdbbde035c453b87165991" exitCode=0 Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.552320 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a4b27462b07cefe79527d1092f45690ba0cab64ca8494b6659e033499776238" Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.562508 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-j9dzs" Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.617278 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed51f349-1527-436b-afc3-90b18fece2ac-host\") pod \"ed51f349-1527-436b-afc3-90b18fece2ac\" (UID: \"ed51f349-1527-436b-afc3-90b18fece2ac\") " Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.617427 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfchv\" (UniqueName: \"kubernetes.io/projected/ed51f349-1527-436b-afc3-90b18fece2ac-kube-api-access-hfchv\") pod \"ed51f349-1527-436b-afc3-90b18fece2ac\" (UID: \"ed51f349-1527-436b-afc3-90b18fece2ac\") " Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.617416 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed51f349-1527-436b-afc3-90b18fece2ac-host" (OuterVolumeSpecName: "host") pod "ed51f349-1527-436b-afc3-90b18fece2ac" (UID: "ed51f349-1527-436b-afc3-90b18fece2ac"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.618251 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ed51f349-1527-436b-afc3-90b18fece2ac-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.625927 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed51f349-1527-436b-afc3-90b18fece2ac-kube-api-access-hfchv" (OuterVolumeSpecName: "kube-api-access-hfchv") pod "ed51f349-1527-436b-afc3-90b18fece2ac" (UID: "ed51f349-1527-436b-afc3-90b18fece2ac"). InnerVolumeSpecName "kube-api-access-hfchv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.719908 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfchv\" (UniqueName: \"kubernetes.io/projected/ed51f349-1527-436b-afc3-90b18fece2ac-kube-api-access-hfchv\") on node \"crc\" DevicePath \"\"" Jun 06 10:48:13 crc kubenswrapper[4911]: I0606 10:48:13.966640 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed51f349-1527-436b-afc3-90b18fece2ac" path="/var/lib/kubelet/pods/ed51f349-1527-436b-afc3-90b18fece2ac/volumes" Jun 06 10:48:14 crc kubenswrapper[4911]: I0606 10:48:14.567235 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smslj" event={"ID":"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07","Type":"ContainerStarted","Data":"e1a7011e36b084e6fc4a67aa57edf486d208421af85e62e79ac699482f349429"} Jun 06 10:48:14 crc kubenswrapper[4911]: I0606 10:48:14.567302 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-j9dzs" Jun 06 10:48:14 crc kubenswrapper[4911]: I0606 10:48:14.594257 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-smslj" podStartSLOduration=2.998723537 podStartE2EDuration="5.59423649s" podCreationTimestamp="2025-06-06 10:48:09 +0000 UTC" firstStartedPulling="2025-06-06 10:48:11.522415753 +0000 UTC m=+5702.797841286" lastFinishedPulling="2025-06-06 10:48:14.117928686 +0000 UTC m=+5705.393354239" observedRunningTime="2025-06-06 10:48:14.588158773 +0000 UTC m=+5705.863584326" watchObservedRunningTime="2025-06-06 10:48:14.59423649 +0000 UTC m=+5705.869662023" Jun 06 10:48:19 crc kubenswrapper[4911]: I0606 10:48:19.884705 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:19 crc kubenswrapper[4911]: I0606 10:48:19.885542 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:19 crc kubenswrapper[4911]: I0606 10:48:19.943665 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:20 crc kubenswrapper[4911]: I0606 10:48:20.704764 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:20 crc kubenswrapper[4911]: I0606 10:48:20.761929 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-smslj"] Jun 06 10:48:21 crc kubenswrapper[4911]: I0606 10:48:21.947992 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:48:21 crc kubenswrapper[4911]: E0606 10:48:21.948962 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:48:22 crc kubenswrapper[4911]: I0606 10:48:22.654599 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-smslj" 
podUID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" containerName="registry-server" containerID="cri-o://e1a7011e36b084e6fc4a67aa57edf486d208421af85e62e79ac699482f349429" gracePeriod=2 Jun 06 10:48:23 crc kubenswrapper[4911]: I0606 10:48:23.670380 4911 generic.go:334] "Generic (PLEG): container finished" podID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" containerID="e1a7011e36b084e6fc4a67aa57edf486d208421af85e62e79ac699482f349429" exitCode=0 Jun 06 10:48:23 crc kubenswrapper[4911]: I0606 10:48:23.670442 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smslj" event={"ID":"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07","Type":"ContainerDied","Data":"e1a7011e36b084e6fc4a67aa57edf486d208421af85e62e79ac699482f349429"} Jun 06 10:48:23 crc kubenswrapper[4911]: I0606 10:48:23.909210 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.111849 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-utilities\") pod \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.111904 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-catalog-content\") pod \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.111975 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxggc\" (UniqueName: \"kubernetes.io/projected/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-kube-api-access-hxggc\") pod \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\" (UID: \"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07\") " Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.113149 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-utilities" (OuterVolumeSpecName: "utilities") pod "4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" (UID: "4524e4e1-d62c-49f5-a6bf-faa61bdb1f07"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.120438 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-kube-api-access-hxggc" (OuterVolumeSpecName: "kube-api-access-hxggc") pod "4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" (UID: "4524e4e1-d62c-49f5-a6bf-faa61bdb1f07"). InnerVolumeSpecName "kube-api-access-hxggc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.209380 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" (UID: "4524e4e1-d62c-49f5-a6bf-faa61bdb1f07"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.215372 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.215399 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.215412 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxggc\" (UniqueName: \"kubernetes.io/projected/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07-kube-api-access-hxggc\") on node \"crc\" DevicePath \"\"" Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.687237 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smslj" event={"ID":"4524e4e1-d62c-49f5-a6bf-faa61bdb1f07","Type":"ContainerDied","Data":"a940e1422eb4889ec4f50a027596c60af776b29e38972a083b650065ada9f49e"} Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.687763 4911 scope.go:117] "RemoveContainer" containerID="e1a7011e36b084e6fc4a67aa57edf486d208421af85e62e79ac699482f349429" Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.687307 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-smslj" Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.723426 4911 scope.go:117] "RemoveContainer" containerID="b3538ab694fd5f4029cbed8c18b09b2abda1760c09c4086bced31ffb00f3d7eb" Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.732290 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-smslj"] Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.742822 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-smslj"] Jun 06 10:48:24 crc kubenswrapper[4911]: I0606 10:48:24.777613 4911 scope.go:117] "RemoveContainer" containerID="919170b05c0708196173b6b421e832832ca8692ae4fbdbef41408bd5e7b4ee14" Jun 06 10:48:25 crc kubenswrapper[4911]: I0606 10:48:25.962637 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" path="/var/lib/kubelet/pods/4524e4e1-d62c-49f5-a6bf-faa61bdb1f07/volumes" Jun 06 10:48:35 crc kubenswrapper[4911]: I0606 10:48:35.948710 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:48:35 crc kubenswrapper[4911]: E0606 10:48:35.949928 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:48:46 crc kubenswrapper[4911]: I0606 10:48:46.947728 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:48:46 crc kubenswrapper[4911]: E0606 10:48:46.950498 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:49:01 crc kubenswrapper[4911]: I0606 10:49:01.949196 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:49:01 crc kubenswrapper[4911]: E0606 10:49:01.950995 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:49:01 crc kubenswrapper[4911]: I0606 10:49:01.962876 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-6dfg8"] Jun 06 10:49:01 crc kubenswrapper[4911]: E0606 10:49:01.963582 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" containerName="extract-utilities" Jun 06 10:49:01 crc kubenswrapper[4911]: I0606 10:49:01.963659 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" containerName="extract-utilities" Jun 06 10:49:01 crc kubenswrapper[4911]: E0606 10:49:01.963747 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed51f349-1527-436b-afc3-90b18fece2ac" containerName="container-00" Jun 06 10:49:01 crc kubenswrapper[4911]: I0606 10:49:01.963803 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed51f349-1527-436b-afc3-90b18fece2ac" containerName="container-00" Jun 06 10:49:01 crc kubenswrapper[4911]: E0606 10:49:01.963883 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" containerName="registry-server" Jun 06 10:49:01 crc kubenswrapper[4911]: I0606 10:49:01.963940 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" containerName="registry-server" Jun 06 10:49:01 crc kubenswrapper[4911]: E0606 10:49:01.964017 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" containerName="extract-content" Jun 06 10:49:01 crc kubenswrapper[4911]: I0606 10:49:01.964072 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" containerName="extract-content" Jun 06 10:49:01 crc kubenswrapper[4911]: I0606 10:49:01.964511 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="4524e4e1-d62c-49f5-a6bf-faa61bdb1f07" containerName="registry-server" Jun 06 10:49:01 crc kubenswrapper[4911]: I0606 10:49:01.964705 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed51f349-1527-436b-afc3-90b18fece2ac" containerName="container-00" Jun 06 10:49:01 crc kubenswrapper[4911]: I0606 10:49:01.965692 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-6dfg8" Jun 06 10:49:02 crc kubenswrapper[4911]: I0606 10:49:02.006241 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpx6b\" (UniqueName: \"kubernetes.io/projected/3f0fd2f2-074e-452d-964a-4ec3d225afe4-kube-api-access-wpx6b\") pod \"crc-debug-6dfg8\" (UID: \"3f0fd2f2-074e-452d-964a-4ec3d225afe4\") " pod="openstack/crc-debug-6dfg8" Jun 06 10:49:02 crc kubenswrapper[4911]: I0606 10:49:02.006398 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3f0fd2f2-074e-452d-964a-4ec3d225afe4-host\") pod \"crc-debug-6dfg8\" (UID: \"3f0fd2f2-074e-452d-964a-4ec3d225afe4\") " pod="openstack/crc-debug-6dfg8" Jun 06 10:49:02 crc kubenswrapper[4911]: I0606 10:49:02.109170 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3f0fd2f2-074e-452d-964a-4ec3d225afe4-host\") pod \"crc-debug-6dfg8\" (UID: \"3f0fd2f2-074e-452d-964a-4ec3d225afe4\") " pod="openstack/crc-debug-6dfg8" Jun 06 10:49:02 crc kubenswrapper[4911]: I0606 10:49:02.109332 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3f0fd2f2-074e-452d-964a-4ec3d225afe4-host\") pod \"crc-debug-6dfg8\" (UID: \"3f0fd2f2-074e-452d-964a-4ec3d225afe4\") " pod="openstack/crc-debug-6dfg8" Jun 06 10:49:02 crc kubenswrapper[4911]: I0606 10:49:02.110049 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpx6b\" (UniqueName: \"kubernetes.io/projected/3f0fd2f2-074e-452d-964a-4ec3d225afe4-kube-api-access-wpx6b\") pod \"crc-debug-6dfg8\" (UID: \"3f0fd2f2-074e-452d-964a-4ec3d225afe4\") " pod="openstack/crc-debug-6dfg8" Jun 06 10:49:02 crc kubenswrapper[4911]: I0606 10:49:02.131660 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpx6b\" (UniqueName: \"kubernetes.io/projected/3f0fd2f2-074e-452d-964a-4ec3d225afe4-kube-api-access-wpx6b\") pod \"crc-debug-6dfg8\" (UID: \"3f0fd2f2-074e-452d-964a-4ec3d225afe4\") " pod="openstack/crc-debug-6dfg8" Jun 06 10:49:02 crc kubenswrapper[4911]: I0606 10:49:02.290599 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-6dfg8" Jun 06 10:49:03 crc kubenswrapper[4911]: I0606 10:49:03.105005 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-6dfg8" event={"ID":"3f0fd2f2-074e-452d-964a-4ec3d225afe4","Type":"ContainerStarted","Data":"4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496"} Jun 06 10:49:03 crc kubenswrapper[4911]: I0606 10:49:03.105588 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-6dfg8" event={"ID":"3f0fd2f2-074e-452d-964a-4ec3d225afe4","Type":"ContainerStarted","Data":"6e97d422587c3aa85293bf0d3997b835738697047c7e337fd5ca042cd17fed79"} Jun 06 10:49:03 crc kubenswrapper[4911]: I0606 10:49:03.127629 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-6dfg8" podStartSLOduration=2.127601946 podStartE2EDuration="2.127601946s" podCreationTimestamp="2025-06-06 10:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:49:03.120873412 +0000 UTC m=+5754.396298955" watchObservedRunningTime="2025-06-06 10:49:03.127601946 +0000 UTC m=+5754.403027489" Jun 06 10:49:12 crc kubenswrapper[4911]: I0606 10:49:12.987020 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-6dfg8"] Jun 06 10:49:12 crc kubenswrapper[4911]: I0606 10:49:12.988315 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-6dfg8" podUID="3f0fd2f2-074e-452d-964a-4ec3d225afe4" containerName="container-00" containerID="cri-o://4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496" gracePeriod=2 Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.001673 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-6dfg8"] Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.125926 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-6dfg8" Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.208310 4911 generic.go:334] "Generic (PLEG): container finished" podID="3f0fd2f2-074e-452d-964a-4ec3d225afe4" containerID="4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496" exitCode=0 Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.208370 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-6dfg8" Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.208389 4911 scope.go:117] "RemoveContainer" containerID="4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496" Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.236247 4911 scope.go:117] "RemoveContainer" containerID="4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496" Jun 06 10:49:13 crc kubenswrapper[4911]: E0606 10:49:13.236725 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496\": container with ID starting with 4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496 not found: ID does not exist" containerID="4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496" Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.236799 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496"} err="failed to get container status \"4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496\": rpc error: code = NotFound desc = could not find container \"4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496\": container with ID starting with 4bc10e9fa3fa99fe306ea3d18ece81a158a5444a78454c2e443aaa44b49de496 not found: ID does not exist" Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.251694 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3f0fd2f2-074e-452d-964a-4ec3d225afe4-host\") pod \"3f0fd2f2-074e-452d-964a-4ec3d225afe4\" (UID: \"3f0fd2f2-074e-452d-964a-4ec3d225afe4\") " Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.251788 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3f0fd2f2-074e-452d-964a-4ec3d225afe4-host" (OuterVolumeSpecName: "host") pod "3f0fd2f2-074e-452d-964a-4ec3d225afe4" (UID: "3f0fd2f2-074e-452d-964a-4ec3d225afe4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.251992 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpx6b\" (UniqueName: \"kubernetes.io/projected/3f0fd2f2-074e-452d-964a-4ec3d225afe4-kube-api-access-wpx6b\") pod \"3f0fd2f2-074e-452d-964a-4ec3d225afe4\" (UID: \"3f0fd2f2-074e-452d-964a-4ec3d225afe4\") " Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.252815 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3f0fd2f2-074e-452d-964a-4ec3d225afe4-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.260478 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f0fd2f2-074e-452d-964a-4ec3d225afe4-kube-api-access-wpx6b" (OuterVolumeSpecName: "kube-api-access-wpx6b") pod "3f0fd2f2-074e-452d-964a-4ec3d225afe4" (UID: "3f0fd2f2-074e-452d-964a-4ec3d225afe4"). InnerVolumeSpecName "kube-api-access-wpx6b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.355692 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpx6b\" (UniqueName: \"kubernetes.io/projected/3f0fd2f2-074e-452d-964a-4ec3d225afe4-kube-api-access-wpx6b\") on node \"crc\" DevicePath \"\"" Jun 06 10:49:13 crc kubenswrapper[4911]: I0606 10:49:13.961331 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f0fd2f2-074e-452d-964a-4ec3d225afe4" path="/var/lib/kubelet/pods/3f0fd2f2-074e-452d-964a-4ec3d225afe4/volumes" Jun 06 10:49:16 crc kubenswrapper[4911]: I0606 10:49:16.948631 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:49:16 crc kubenswrapper[4911]: E0606 10:49:16.949970 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:49:24 crc kubenswrapper[4911]: I0606 10:49:24.113545 4911 scope.go:117] "RemoveContainer" containerID="8710b066cfc9cd02fe16622b6af42df69d8cef38647e3b02293c48cac3491401" Jun 06 10:49:24 crc kubenswrapper[4911]: I0606 10:49:24.140701 4911 scope.go:117] "RemoveContainer" containerID="d152881f8b99313127242ec8ebfe146d347ac5204630a7bc21cf06eec3488f03" Jun 06 10:49:24 crc kubenswrapper[4911]: I0606 10:49:24.190786 4911 scope.go:117] "RemoveContainer" containerID="ea64a0745bd7f8c1018fedda77b0f8a4e6b4526554682797a6e07d8c69d33183" Jun 06 10:49:29 crc kubenswrapper[4911]: I0606 10:49:29.958267 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:49:29 crc kubenswrapper[4911]: E0606 10:49:29.959701 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:49:42 crc kubenswrapper[4911]: I0606 10:49:42.948383 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:49:42 crc kubenswrapper[4911]: E0606 10:49:42.949763 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:49:56 crc kubenswrapper[4911]: I0606 10:49:56.948608 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:49:56 crc kubenswrapper[4911]: E0606 10:49:56.949779 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.321700 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-4rmqr"] Jun 06 10:50:02 crc kubenswrapper[4911]: E0606 10:50:02.323127 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f0fd2f2-074e-452d-964a-4ec3d225afe4" containerName="container-00" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.323207 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f0fd2f2-074e-452d-964a-4ec3d225afe4" containerName="container-00" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.323418 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f0fd2f2-074e-452d-964a-4ec3d225afe4" containerName="container-00" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.324283 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-4rmqr" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.389123 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-th56v\" (UniqueName: \"kubernetes.io/projected/2227a39d-b1d2-4215-830e-2910447d12b6-kube-api-access-th56v\") pod \"crc-debug-4rmqr\" (UID: \"2227a39d-b1d2-4215-830e-2910447d12b6\") " pod="openstack/crc-debug-4rmqr" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.389394 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2227a39d-b1d2-4215-830e-2910447d12b6-host\") pod \"crc-debug-4rmqr\" (UID: \"2227a39d-b1d2-4215-830e-2910447d12b6\") " pod="openstack/crc-debug-4rmqr" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.491943 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2227a39d-b1d2-4215-830e-2910447d12b6-host\") pod \"crc-debug-4rmqr\" (UID: \"2227a39d-b1d2-4215-830e-2910447d12b6\") " pod="openstack/crc-debug-4rmqr" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.492056 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-th56v\" (UniqueName: \"kubernetes.io/projected/2227a39d-b1d2-4215-830e-2910447d12b6-kube-api-access-th56v\") pod \"crc-debug-4rmqr\" (UID: \"2227a39d-b1d2-4215-830e-2910447d12b6\") " pod="openstack/crc-debug-4rmqr" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.492140 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2227a39d-b1d2-4215-830e-2910447d12b6-host\") pod \"crc-debug-4rmqr\" (UID: \"2227a39d-b1d2-4215-830e-2910447d12b6\") " pod="openstack/crc-debug-4rmqr" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.515683 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-th56v\" (UniqueName: \"kubernetes.io/projected/2227a39d-b1d2-4215-830e-2910447d12b6-kube-api-access-th56v\") pod \"crc-debug-4rmqr\" (UID: \"2227a39d-b1d2-4215-830e-2910447d12b6\") " pod="openstack/crc-debug-4rmqr" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.653180 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-4rmqr" Jun 06 10:50:02 crc kubenswrapper[4911]: I0606 10:50:02.810260 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-4rmqr" event={"ID":"2227a39d-b1d2-4215-830e-2910447d12b6","Type":"ContainerStarted","Data":"face732551195788bb2e79cd323610edfb1a5d89c1798ccd2dc191ac80da7d70"} Jun 06 10:50:03 crc kubenswrapper[4911]: I0606 10:50:03.822585 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-4rmqr" event={"ID":"2227a39d-b1d2-4215-830e-2910447d12b6","Type":"ContainerStarted","Data":"882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b"} Jun 06 10:50:03 crc kubenswrapper[4911]: I0606 10:50:03.844843 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-4rmqr" podStartSLOduration=1.8448052499999998 podStartE2EDuration="1.84480525s" podCreationTimestamp="2025-06-06 10:50:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:50:03.842604433 +0000 UTC m=+5815.118029976" watchObservedRunningTime="2025-06-06 10:50:03.84480525 +0000 UTC m=+5815.120230833" Jun 06 10:50:07 crc kubenswrapper[4911]: I0606 10:50:07.949180 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:50:07 crc kubenswrapper[4911]: E0606 10:50:07.950429 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.533629 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-4rmqr"] Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.534750 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-4rmqr" podUID="2227a39d-b1d2-4215-830e-2910447d12b6" containerName="container-00" containerID="cri-o://882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b" gracePeriod=2 Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.547933 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-4rmqr"] Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.635317 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-4rmqr" Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.779006 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2227a39d-b1d2-4215-830e-2910447d12b6-host\") pod \"2227a39d-b1d2-4215-830e-2910447d12b6\" (UID: \"2227a39d-b1d2-4215-830e-2910447d12b6\") " Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.779178 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-th56v\" (UniqueName: \"kubernetes.io/projected/2227a39d-b1d2-4215-830e-2910447d12b6-kube-api-access-th56v\") pod \"2227a39d-b1d2-4215-830e-2910447d12b6\" (UID: \"2227a39d-b1d2-4215-830e-2910447d12b6\") " Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.780868 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2227a39d-b1d2-4215-830e-2910447d12b6-host" (OuterVolumeSpecName: "host") pod "2227a39d-b1d2-4215-830e-2910447d12b6" (UID: "2227a39d-b1d2-4215-830e-2910447d12b6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.785920 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2227a39d-b1d2-4215-830e-2910447d12b6-kube-api-access-th56v" (OuterVolumeSpecName: "kube-api-access-th56v") pod "2227a39d-b1d2-4215-830e-2910447d12b6" (UID: "2227a39d-b1d2-4215-830e-2910447d12b6"). InnerVolumeSpecName "kube-api-access-th56v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.883779 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2227a39d-b1d2-4215-830e-2910447d12b6-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.883842 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-th56v\" (UniqueName: \"kubernetes.io/projected/2227a39d-b1d2-4215-830e-2910447d12b6-kube-api-access-th56v\") on node \"crc\" DevicePath \"\"" Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.934458 4911 generic.go:334] "Generic (PLEG): container finished" podID="2227a39d-b1d2-4215-830e-2910447d12b6" containerID="882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b" exitCode=0 Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.935366 4911 scope.go:117] "RemoveContainer" containerID="882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b" Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.935837 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-4rmqr" Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.970227 4911 scope.go:117] "RemoveContainer" containerID="882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b" Jun 06 10:50:13 crc kubenswrapper[4911]: E0606 10:50:13.970816 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b\": container with ID starting with 882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b not found: ID does not exist" containerID="882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b" Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.970871 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b"} err="failed to get container status \"882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b\": rpc error: code = NotFound desc = could not find container \"882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b\": container with ID starting with 882817d9379fc0ca58790c89c02a44487691c086c9a6a6a06078053ff123429b not found: ID does not exist" Jun 06 10:50:13 crc kubenswrapper[4911]: I0606 10:50:13.973912 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2227a39d-b1d2-4215-830e-2910447d12b6" path="/var/lib/kubelet/pods/2227a39d-b1d2-4215-830e-2910447d12b6/volumes" Jun 06 10:50:19 crc kubenswrapper[4911]: I0606 10:50:19.957264 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:50:19 crc kubenswrapper[4911]: E0606 10:50:19.970087 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:50:32 crc kubenswrapper[4911]: I0606 10:50:32.948915 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:50:32 crc kubenswrapper[4911]: E0606 10:50:32.950325 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:50:45 crc kubenswrapper[4911]: I0606 10:50:45.951898 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:50:45 crc kubenswrapper[4911]: E0606 10:50:45.952692 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" 
podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:50:58 crc kubenswrapper[4911]: I0606 10:50:58.948807 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:50:58 crc kubenswrapper[4911]: E0606 10:50:58.949894 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:51:01 crc kubenswrapper[4911]: I0606 10:51:01.971024 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-tcbqj"] Jun 06 10:51:01 crc kubenswrapper[4911]: E0606 10:51:01.972701 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2227a39d-b1d2-4215-830e-2910447d12b6" containerName="container-00" Jun 06 10:51:01 crc kubenswrapper[4911]: I0606 10:51:01.972729 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="2227a39d-b1d2-4215-830e-2910447d12b6" containerName="container-00" Jun 06 10:51:01 crc kubenswrapper[4911]: I0606 10:51:01.972958 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="2227a39d-b1d2-4215-830e-2910447d12b6" containerName="container-00" Jun 06 10:51:01 crc kubenswrapper[4911]: I0606 10:51:01.973820 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-tcbqj" Jun 06 10:51:02 crc kubenswrapper[4911]: I0606 10:51:02.067761 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-host\") pod \"crc-debug-tcbqj\" (UID: \"9fbe2872-2f4e-4c9f-895a-5f851afe13c3\") " pod="openstack/crc-debug-tcbqj" Jun 06 10:51:02 crc kubenswrapper[4911]: I0606 10:51:02.067995 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzrwz\" (UniqueName: \"kubernetes.io/projected/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-kube-api-access-bzrwz\") pod \"crc-debug-tcbqj\" (UID: \"9fbe2872-2f4e-4c9f-895a-5f851afe13c3\") " pod="openstack/crc-debug-tcbqj" Jun 06 10:51:02 crc kubenswrapper[4911]: I0606 10:51:02.173427 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzrwz\" (UniqueName: \"kubernetes.io/projected/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-kube-api-access-bzrwz\") pod \"crc-debug-tcbqj\" (UID: \"9fbe2872-2f4e-4c9f-895a-5f851afe13c3\") " pod="openstack/crc-debug-tcbqj" Jun 06 10:51:02 crc kubenswrapper[4911]: I0606 10:51:02.173623 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-host\") pod \"crc-debug-tcbqj\" (UID: \"9fbe2872-2f4e-4c9f-895a-5f851afe13c3\") " pod="openstack/crc-debug-tcbqj" Jun 06 10:51:02 crc kubenswrapper[4911]: I0606 10:51:02.173832 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-host\") pod \"crc-debug-tcbqj\" (UID: \"9fbe2872-2f4e-4c9f-895a-5f851afe13c3\") " pod="openstack/crc-debug-tcbqj" Jun 06 10:51:02 crc kubenswrapper[4911]: I0606 10:51:02.197737 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bzrwz\" (UniqueName: \"kubernetes.io/projected/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-kube-api-access-bzrwz\") pod \"crc-debug-tcbqj\" (UID: \"9fbe2872-2f4e-4c9f-895a-5f851afe13c3\") " pod="openstack/crc-debug-tcbqj" Jun 06 10:51:02 crc kubenswrapper[4911]: I0606 10:51:02.296474 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-tcbqj" Jun 06 10:51:02 crc kubenswrapper[4911]: I0606 10:51:02.479536 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-tcbqj" event={"ID":"9fbe2872-2f4e-4c9f-895a-5f851afe13c3","Type":"ContainerStarted","Data":"8157d8a06404b8d0b1e14b87442ac73d7a8a501137234609a90d1d3c1114fa47"} Jun 06 10:51:03 crc kubenswrapper[4911]: I0606 10:51:03.490789 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-tcbqj" event={"ID":"9fbe2872-2f4e-4c9f-895a-5f851afe13c3","Type":"ContainerStarted","Data":"245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4"} Jun 06 10:51:03 crc kubenswrapper[4911]: I0606 10:51:03.511223 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-tcbqj" podStartSLOduration=2.511196001 podStartE2EDuration="2.511196001s" podCreationTimestamp="2025-06-06 10:51:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:51:03.504671982 +0000 UTC m=+5874.780097525" watchObservedRunningTime="2025-06-06 10:51:03.511196001 +0000 UTC m=+5874.786621544" Jun 06 10:51:12 crc kubenswrapper[4911]: I0606 10:51:12.985815 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-tcbqj"] Jun 06 10:51:12 crc kubenswrapper[4911]: I0606 10:51:12.986974 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-tcbqj" podUID="9fbe2872-2f4e-4c9f-895a-5f851afe13c3" containerName="container-00" containerID="cri-o://245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4" gracePeriod=2 Jun 06 10:51:12 crc kubenswrapper[4911]: I0606 10:51:12.998738 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-tcbqj"] Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.100813 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-tcbqj" Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.237930 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzrwz\" (UniqueName: \"kubernetes.io/projected/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-kube-api-access-bzrwz\") pod \"9fbe2872-2f4e-4c9f-895a-5f851afe13c3\" (UID: \"9fbe2872-2f4e-4c9f-895a-5f851afe13c3\") " Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.238912 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-host\") pod \"9fbe2872-2f4e-4c9f-895a-5f851afe13c3\" (UID: \"9fbe2872-2f4e-4c9f-895a-5f851afe13c3\") " Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.238995 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-host" (OuterVolumeSpecName: "host") pod "9fbe2872-2f4e-4c9f-895a-5f851afe13c3" (UID: "9fbe2872-2f4e-4c9f-895a-5f851afe13c3"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.239982 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.245154 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-kube-api-access-bzrwz" (OuterVolumeSpecName: "kube-api-access-bzrwz") pod "9fbe2872-2f4e-4c9f-895a-5f851afe13c3" (UID: "9fbe2872-2f4e-4c9f-895a-5f851afe13c3"). InnerVolumeSpecName "kube-api-access-bzrwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.341801 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzrwz\" (UniqueName: \"kubernetes.io/projected/9fbe2872-2f4e-4c9f-895a-5f851afe13c3-kube-api-access-bzrwz\") on node \"crc\" DevicePath \"\"" Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.598176 4911 generic.go:334] "Generic (PLEG): container finished" podID="9fbe2872-2f4e-4c9f-895a-5f851afe13c3" containerID="245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4" exitCode=0 Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.598250 4911 scope.go:117] "RemoveContainer" containerID="245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4" Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.598313 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-tcbqj" Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.633504 4911 scope.go:117] "RemoveContainer" containerID="245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4" Jun 06 10:51:13 crc kubenswrapper[4911]: E0606 10:51:13.636858 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4\": container with ID starting with 245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4 not found: ID does not exist" containerID="245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4" Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.636929 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4"} err="failed to get container status \"245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4\": rpc error: code = NotFound desc = could not find container \"245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4\": container with ID starting with 245ca190bdf81ea35e174303fda4b555fd12f0a2f6e39075847e6aff7ed96cb4 not found: ID does not exist" Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.947829 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:51:13 crc kubenswrapper[4911]: E0606 10:51:13.948449 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" 
podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:51:13 crc kubenswrapper[4911]: I0606 10:51:13.959633 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fbe2872-2f4e-4c9f-895a-5f851afe13c3" path="/var/lib/kubelet/pods/9fbe2872-2f4e-4c9f-895a-5f851afe13c3/volumes" Jun 06 10:51:28 crc kubenswrapper[4911]: I0606 10:51:28.948941 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:51:28 crc kubenswrapper[4911]: E0606 10:51:28.950271 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:51:43 crc kubenswrapper[4911]: I0606 10:51:43.948884 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:51:43 crc kubenswrapper[4911]: E0606 10:51:43.949936 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:51:55 crc kubenswrapper[4911]: I0606 10:51:55.948675 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:51:55 crc kubenswrapper[4911]: E0606 10:51:55.950715 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:52:02 crc kubenswrapper[4911]: I0606 10:52:02.421697 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-dlxpk"] Jun 06 10:52:02 crc kubenswrapper[4911]: E0606 10:52:02.422963 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fbe2872-2f4e-4c9f-895a-5f851afe13c3" containerName="container-00" Jun 06 10:52:02 crc kubenswrapper[4911]: I0606 10:52:02.422989 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fbe2872-2f4e-4c9f-895a-5f851afe13c3" containerName="container-00" Jun 06 10:52:02 crc kubenswrapper[4911]: I0606 10:52:02.423284 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fbe2872-2f4e-4c9f-895a-5f851afe13c3" containerName="container-00" Jun 06 10:52:02 crc kubenswrapper[4911]: I0606 10:52:02.424553 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-dlxpk" Jun 06 10:52:02 crc kubenswrapper[4911]: I0606 10:52:02.554230 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-577p4\" (UniqueName: \"kubernetes.io/projected/143d575e-c87d-41d5-92e3-ea7ae3361863-kube-api-access-577p4\") pod \"crc-debug-dlxpk\" (UID: \"143d575e-c87d-41d5-92e3-ea7ae3361863\") " pod="openstack/crc-debug-dlxpk" Jun 06 10:52:02 crc kubenswrapper[4911]: I0606 10:52:02.554342 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/143d575e-c87d-41d5-92e3-ea7ae3361863-host\") pod \"crc-debug-dlxpk\" (UID: \"143d575e-c87d-41d5-92e3-ea7ae3361863\") " pod="openstack/crc-debug-dlxpk" Jun 06 10:52:02 crc kubenswrapper[4911]: I0606 10:52:02.657107 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-577p4\" (UniqueName: \"kubernetes.io/projected/143d575e-c87d-41d5-92e3-ea7ae3361863-kube-api-access-577p4\") pod \"crc-debug-dlxpk\" (UID: \"143d575e-c87d-41d5-92e3-ea7ae3361863\") " pod="openstack/crc-debug-dlxpk" Jun 06 10:52:02 crc kubenswrapper[4911]: I0606 10:52:02.657216 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/143d575e-c87d-41d5-92e3-ea7ae3361863-host\") pod \"crc-debug-dlxpk\" (UID: \"143d575e-c87d-41d5-92e3-ea7ae3361863\") " pod="openstack/crc-debug-dlxpk" Jun 06 10:52:02 crc kubenswrapper[4911]: I0606 10:52:02.657372 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/143d575e-c87d-41d5-92e3-ea7ae3361863-host\") pod \"crc-debug-dlxpk\" (UID: \"143d575e-c87d-41d5-92e3-ea7ae3361863\") " pod="openstack/crc-debug-dlxpk" Jun 06 10:52:02 crc kubenswrapper[4911]: I0606 10:52:02.682285 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-577p4\" (UniqueName: \"kubernetes.io/projected/143d575e-c87d-41d5-92e3-ea7ae3361863-kube-api-access-577p4\") pod \"crc-debug-dlxpk\" (UID: \"143d575e-c87d-41d5-92e3-ea7ae3361863\") " pod="openstack/crc-debug-dlxpk" Jun 06 10:52:02 crc kubenswrapper[4911]: I0606 10:52:02.746539 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-dlxpk" Jun 06 10:52:03 crc kubenswrapper[4911]: I0606 10:52:03.108407 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-dlxpk" event={"ID":"143d575e-c87d-41d5-92e3-ea7ae3361863","Type":"ContainerStarted","Data":"d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f"} Jun 06 10:52:03 crc kubenswrapper[4911]: I0606 10:52:03.108497 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-dlxpk" event={"ID":"143d575e-c87d-41d5-92e3-ea7ae3361863","Type":"ContainerStarted","Data":"226dcdbdc6be1d2b3fe62691821bb23b774aad24e964f4560fa57af3b45abd37"} Jun 06 10:52:03 crc kubenswrapper[4911]: I0606 10:52:03.129889 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-dlxpk" podStartSLOduration=1.129867311 podStartE2EDuration="1.129867311s" podCreationTimestamp="2025-06-06 10:52:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:52:03.121875685 +0000 UTC m=+5934.397301238" watchObservedRunningTime="2025-06-06 10:52:03.129867311 +0000 UTC m=+5934.405292854" Jun 06 10:52:07 crc kubenswrapper[4911]: I0606 10:52:07.948461 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:52:07 crc kubenswrapper[4911]: E0606 10:52:07.949241 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:52:13 crc kubenswrapper[4911]: I0606 10:52:13.611073 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-dlxpk"] Jun 06 10:52:13 crc kubenswrapper[4911]: I0606 10:52:13.612140 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-dlxpk" podUID="143d575e-c87d-41d5-92e3-ea7ae3361863" containerName="container-00" containerID="cri-o://d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f" gracePeriod=2 Jun 06 10:52:13 crc kubenswrapper[4911]: I0606 10:52:13.622795 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-dlxpk"] Jun 06 10:52:13 crc kubenswrapper[4911]: I0606 10:52:13.822790 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-dlxpk" Jun 06 10:52:13 crc kubenswrapper[4911]: I0606 10:52:13.909719 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-577p4\" (UniqueName: \"kubernetes.io/projected/143d575e-c87d-41d5-92e3-ea7ae3361863-kube-api-access-577p4\") pod \"143d575e-c87d-41d5-92e3-ea7ae3361863\" (UID: \"143d575e-c87d-41d5-92e3-ea7ae3361863\") " Jun 06 10:52:13 crc kubenswrapper[4911]: I0606 10:52:13.909835 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/143d575e-c87d-41d5-92e3-ea7ae3361863-host\") pod \"143d575e-c87d-41d5-92e3-ea7ae3361863\" (UID: \"143d575e-c87d-41d5-92e3-ea7ae3361863\") " Jun 06 10:52:13 crc kubenswrapper[4911]: I0606 10:52:13.910175 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/143d575e-c87d-41d5-92e3-ea7ae3361863-host" (OuterVolumeSpecName: "host") pod "143d575e-c87d-41d5-92e3-ea7ae3361863" (UID: "143d575e-c87d-41d5-92e3-ea7ae3361863"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:52:13 crc kubenswrapper[4911]: I0606 10:52:13.910610 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/143d575e-c87d-41d5-92e3-ea7ae3361863-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:52:13 crc kubenswrapper[4911]: I0606 10:52:13.921419 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/143d575e-c87d-41d5-92e3-ea7ae3361863-kube-api-access-577p4" (OuterVolumeSpecName: "kube-api-access-577p4") pod "143d575e-c87d-41d5-92e3-ea7ae3361863" (UID: "143d575e-c87d-41d5-92e3-ea7ae3361863"). InnerVolumeSpecName "kube-api-access-577p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:52:13 crc kubenswrapper[4911]: I0606 10:52:13.964587 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="143d575e-c87d-41d5-92e3-ea7ae3361863" path="/var/lib/kubelet/pods/143d575e-c87d-41d5-92e3-ea7ae3361863/volumes" Jun 06 10:52:14 crc kubenswrapper[4911]: I0606 10:52:14.012424 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-577p4\" (UniqueName: \"kubernetes.io/projected/143d575e-c87d-41d5-92e3-ea7ae3361863-kube-api-access-577p4\") on node \"crc\" DevicePath \"\"" Jun 06 10:52:14 crc kubenswrapper[4911]: I0606 10:52:14.217791 4911 generic.go:334] "Generic (PLEG): container finished" podID="143d575e-c87d-41d5-92e3-ea7ae3361863" containerID="d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f" exitCode=0 Jun 06 10:52:14 crc kubenswrapper[4911]: I0606 10:52:14.217857 4911 scope.go:117] "RemoveContainer" containerID="d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f" Jun 06 10:52:14 crc kubenswrapper[4911]: I0606 10:52:14.217856 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-dlxpk" Jun 06 10:52:14 crc kubenswrapper[4911]: I0606 10:52:14.240348 4911 scope.go:117] "RemoveContainer" containerID="d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f" Jun 06 10:52:14 crc kubenswrapper[4911]: E0606 10:52:14.241009 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f\": container with ID starting with d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f not found: ID does not exist" containerID="d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f" Jun 06 10:52:14 crc kubenswrapper[4911]: I0606 10:52:14.241067 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f"} err="failed to get container status \"d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f\": rpc error: code = NotFound desc = could not find container \"d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f\": container with ID starting with d5f7d8aaa448a4088f34f1a3eb7f30e6d5148eb87d0b1b02c1686f6d8ee1957f not found: ID does not exist" Jun 06 10:52:21 crc kubenswrapper[4911]: I0606 10:52:21.948225 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:52:21 crc kubenswrapper[4911]: E0606 10:52:21.949368 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:52:32 crc kubenswrapper[4911]: I0606 10:52:32.949423 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:52:32 crc kubenswrapper[4911]: E0606 10:52:32.950895 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:52:46 crc kubenswrapper[4911]: I0606 10:52:46.948550 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:52:46 crc kubenswrapper[4911]: E0606 10:52:46.951234 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:52:58 crc kubenswrapper[4911]: I0606 10:52:58.948911 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:52:59 crc kubenswrapper[4911]: I0606 10:52:59.702464 
4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"e88856e5e2ab29572a14d0db7f394f9fe00a7119de7168db32b1ebc14063c4a9"} Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.041468 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-5v627"] Jun 06 10:53:02 crc kubenswrapper[4911]: E0606 10:53:02.042555 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="143d575e-c87d-41d5-92e3-ea7ae3361863" containerName="container-00" Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.042573 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="143d575e-c87d-41d5-92e3-ea7ae3361863" containerName="container-00" Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.042846 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="143d575e-c87d-41d5-92e3-ea7ae3361863" containerName="container-00" Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.043698 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-5v627" Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.216626 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9280fe54-f080-4072-8396-c17df6ab3828-host\") pod \"crc-debug-5v627\" (UID: \"9280fe54-f080-4072-8396-c17df6ab3828\") " pod="openstack/crc-debug-5v627" Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.216937 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gns66\" (UniqueName: \"kubernetes.io/projected/9280fe54-f080-4072-8396-c17df6ab3828-kube-api-access-gns66\") pod \"crc-debug-5v627\" (UID: \"9280fe54-f080-4072-8396-c17df6ab3828\") " pod="openstack/crc-debug-5v627" Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.319743 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gns66\" (UniqueName: \"kubernetes.io/projected/9280fe54-f080-4072-8396-c17df6ab3828-kube-api-access-gns66\") pod \"crc-debug-5v627\" (UID: \"9280fe54-f080-4072-8396-c17df6ab3828\") " pod="openstack/crc-debug-5v627" Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.319898 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9280fe54-f080-4072-8396-c17df6ab3828-host\") pod \"crc-debug-5v627\" (UID: \"9280fe54-f080-4072-8396-c17df6ab3828\") " pod="openstack/crc-debug-5v627" Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.320035 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9280fe54-f080-4072-8396-c17df6ab3828-host\") pod \"crc-debug-5v627\" (UID: \"9280fe54-f080-4072-8396-c17df6ab3828\") " pod="openstack/crc-debug-5v627" Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.348540 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gns66\" (UniqueName: \"kubernetes.io/projected/9280fe54-f080-4072-8396-c17df6ab3828-kube-api-access-gns66\") pod \"crc-debug-5v627\" (UID: \"9280fe54-f080-4072-8396-c17df6ab3828\") " pod="openstack/crc-debug-5v627" Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.366543 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-5v627" Jun 06 10:53:02 crc kubenswrapper[4911]: W0606 10:53:02.410995 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9280fe54_f080_4072_8396_c17df6ab3828.slice/crio-ee6a927864bd06b7ae27368816f5d9092c828fcb4b4dfa8427a51c2ca74e810b WatchSource:0}: Error finding container ee6a927864bd06b7ae27368816f5d9092c828fcb4b4dfa8427a51c2ca74e810b: Status 404 returned error can't find the container with id ee6a927864bd06b7ae27368816f5d9092c828fcb4b4dfa8427a51c2ca74e810b Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.746678 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-5v627" event={"ID":"9280fe54-f080-4072-8396-c17df6ab3828","Type":"ContainerStarted","Data":"816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17"} Jun 06 10:53:02 crc kubenswrapper[4911]: I0606 10:53:02.747289 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-5v627" event={"ID":"9280fe54-f080-4072-8396-c17df6ab3828","Type":"ContainerStarted","Data":"ee6a927864bd06b7ae27368816f5d9092c828fcb4b4dfa8427a51c2ca74e810b"} Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.627266 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-5v627" podStartSLOduration=9.627241417 podStartE2EDuration="9.627241417s" podCreationTimestamp="2025-06-06 10:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:53:02.769430138 +0000 UTC m=+5994.044855671" watchObservedRunningTime="2025-06-06 10:53:11.627241417 +0000 UTC m=+6002.902666960" Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.640938 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wwbp8"] Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.643128 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.658761 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wwbp8"] Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.727147 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hv86\" (UniqueName: \"kubernetes.io/projected/37d6de86-1607-4b21-88bd-6c8962929ce1-kube-api-access-7hv86\") pod \"certified-operators-wwbp8\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.727585 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-utilities\") pod \"certified-operators-wwbp8\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.727907 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-catalog-content\") pod \"certified-operators-wwbp8\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.830800 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-utilities\") pod \"certified-operators-wwbp8\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.830940 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-catalog-content\") pod \"certified-operators-wwbp8\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.831063 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hv86\" (UniqueName: \"kubernetes.io/projected/37d6de86-1607-4b21-88bd-6c8962929ce1-kube-api-access-7hv86\") pod \"certified-operators-wwbp8\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.831410 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-utilities\") pod \"certified-operators-wwbp8\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.831623 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-catalog-content\") pod \"certified-operators-wwbp8\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.862609 4911 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7hv86\" (UniqueName: \"kubernetes.io/projected/37d6de86-1607-4b21-88bd-6c8962929ce1-kube-api-access-7hv86\") pod \"certified-operators-wwbp8\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:11 crc kubenswrapper[4911]: I0606 10:53:11.967734 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:12 crc kubenswrapper[4911]: I0606 10:53:12.663580 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wwbp8"] Jun 06 10:53:12 crc kubenswrapper[4911]: I0606 10:53:12.839717 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwbp8" event={"ID":"37d6de86-1607-4b21-88bd-6c8962929ce1","Type":"ContainerStarted","Data":"a577b341b7184d0cf00919cad674ee439737ab242a972e88def2fcd5cfb4a39f"} Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.030875 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-5v627"] Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.031397 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-5v627" podUID="9280fe54-f080-4072-8396-c17df6ab3828" containerName="container-00" containerID="cri-o://816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17" gracePeriod=2 Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.046918 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-5v627"] Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.138691 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-5v627" Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.263841 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gns66\" (UniqueName: \"kubernetes.io/projected/9280fe54-f080-4072-8396-c17df6ab3828-kube-api-access-gns66\") pod \"9280fe54-f080-4072-8396-c17df6ab3828\" (UID: \"9280fe54-f080-4072-8396-c17df6ab3828\") " Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.263912 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9280fe54-f080-4072-8396-c17df6ab3828-host\") pod \"9280fe54-f080-4072-8396-c17df6ab3828\" (UID: \"9280fe54-f080-4072-8396-c17df6ab3828\") " Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.264004 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9280fe54-f080-4072-8396-c17df6ab3828-host" (OuterVolumeSpecName: "host") pod "9280fe54-f080-4072-8396-c17df6ab3828" (UID: "9280fe54-f080-4072-8396-c17df6ab3828"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.265124 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9280fe54-f080-4072-8396-c17df6ab3828-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.271636 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9280fe54-f080-4072-8396-c17df6ab3828-kube-api-access-gns66" (OuterVolumeSpecName: "kube-api-access-gns66") pod "9280fe54-f080-4072-8396-c17df6ab3828" (UID: "9280fe54-f080-4072-8396-c17df6ab3828"). 
InnerVolumeSpecName "kube-api-access-gns66". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.367416 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gns66\" (UniqueName: \"kubernetes.io/projected/9280fe54-f080-4072-8396-c17df6ab3828-kube-api-access-gns66\") on node \"crc\" DevicePath \"\"" Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.855228 4911 generic.go:334] "Generic (PLEG): container finished" podID="37d6de86-1607-4b21-88bd-6c8962929ce1" containerID="2fbbc1bbb41baf95021246bd05160c84d5e6053a83f1b8f9cfc4a97b09c41621" exitCode=0 Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.855387 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwbp8" event={"ID":"37d6de86-1607-4b21-88bd-6c8962929ce1","Type":"ContainerDied","Data":"2fbbc1bbb41baf95021246bd05160c84d5e6053a83f1b8f9cfc4a97b09c41621"} Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.857946 4911 generic.go:334] "Generic (PLEG): container finished" podID="9280fe54-f080-4072-8396-c17df6ab3828" containerID="816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17" exitCode=0 Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.858005 4911 scope.go:117] "RemoveContainer" containerID="816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17" Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.858146 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-5v627" Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.860197 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.887552 4911 scope.go:117] "RemoveContainer" containerID="816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17" Jun 06 10:53:13 crc kubenswrapper[4911]: E0606 10:53:13.888628 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17\": container with ID starting with 816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17 not found: ID does not exist" containerID="816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17" Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.888695 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17"} err="failed to get container status \"816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17\": rpc error: code = NotFound desc = could not find container \"816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17\": container with ID starting with 816f2c59f09b2d1e625040a2d84b6be98b17f9611b5f97aad4998c26e5738b17 not found: ID does not exist" Jun 06 10:53:13 crc kubenswrapper[4911]: I0606 10:53:13.962027 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9280fe54-f080-4072-8396-c17df6ab3828" path="/var/lib/kubelet/pods/9280fe54-f080-4072-8396-c17df6ab3828/volumes" Jun 06 10:53:15 crc kubenswrapper[4911]: I0606 10:53:15.892946 4911 generic.go:334] "Generic (PLEG): container finished" podID="37d6de86-1607-4b21-88bd-6c8962929ce1" containerID="c3a163c52cc055a9fe657408359fd92ea640fd3f96a267d9fee6628dc42d1e57" exitCode=0 Jun 06 10:53:15 crc kubenswrapper[4911]: I0606 10:53:15.893031 4911 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwbp8" event={"ID":"37d6de86-1607-4b21-88bd-6c8962929ce1","Type":"ContainerDied","Data":"c3a163c52cc055a9fe657408359fd92ea640fd3f96a267d9fee6628dc42d1e57"} Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.438309 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2d68j"] Jun 06 10:53:16 crc kubenswrapper[4911]: E0606 10:53:16.441218 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9280fe54-f080-4072-8396-c17df6ab3828" containerName="container-00" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.441261 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9280fe54-f080-4072-8396-c17df6ab3828" containerName="container-00" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.441531 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9280fe54-f080-4072-8396-c17df6ab3828" containerName="container-00" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.443319 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.457538 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2d68j"] Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.547126 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-catalog-content\") pod \"redhat-marketplace-2d68j\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.547650 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6c9c\" (UniqueName: \"kubernetes.io/projected/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-kube-api-access-q6c9c\") pod \"redhat-marketplace-2d68j\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.547756 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-utilities\") pod \"redhat-marketplace-2d68j\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.650431 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-catalog-content\") pod \"redhat-marketplace-2d68j\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.650691 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6c9c\" (UniqueName: \"kubernetes.io/projected/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-kube-api-access-q6c9c\") pod \"redhat-marketplace-2d68j\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.650833 4911 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-utilities\") pod \"redhat-marketplace-2d68j\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.651214 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-catalog-content\") pod \"redhat-marketplace-2d68j\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.651495 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-utilities\") pod \"redhat-marketplace-2d68j\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.678812 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6c9c\" (UniqueName: \"kubernetes.io/projected/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-kube-api-access-q6c9c\") pod \"redhat-marketplace-2d68j\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:16 crc kubenswrapper[4911]: I0606 10:53:16.775447 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.033730 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rtkzz"] Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.038876 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.059034 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rtkzz"] Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.163612 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-utilities\") pod \"redhat-operators-rtkzz\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.164191 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4z2r\" (UniqueName: \"kubernetes.io/projected/8159172f-a989-4f49-a61a-817712278ad5-kube-api-access-p4z2r\") pod \"redhat-operators-rtkzz\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.164301 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-catalog-content\") pod \"redhat-operators-rtkzz\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.266760 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-utilities\") pod \"redhat-operators-rtkzz\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.266939 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4z2r\" (UniqueName: \"kubernetes.io/projected/8159172f-a989-4f49-a61a-817712278ad5-kube-api-access-p4z2r\") pod \"redhat-operators-rtkzz\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.267029 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-catalog-content\") pod \"redhat-operators-rtkzz\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.267640 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-utilities\") pod \"redhat-operators-rtkzz\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.267674 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-catalog-content\") pod \"redhat-operators-rtkzz\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.302303 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-p4z2r\" (UniqueName: \"kubernetes.io/projected/8159172f-a989-4f49-a61a-817712278ad5-kube-api-access-p4z2r\") pod \"redhat-operators-rtkzz\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.376107 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.536842 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2d68j"] Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.927656 4911 generic.go:334] "Generic (PLEG): container finished" podID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" containerID="bdcbab9065682cff266740e656b290cc01f3c5c029c4b07c8d6cf077deb5d907" exitCode=0 Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.928204 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2d68j" event={"ID":"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96","Type":"ContainerDied","Data":"bdcbab9065682cff266740e656b290cc01f3c5c029c4b07c8d6cf077deb5d907"} Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.928267 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2d68j" event={"ID":"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96","Type":"ContainerStarted","Data":"67e54b5b10c9595559a081e6d33be2da7a4424632ebbbf4e46752fb35518a1b0"} Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.933849 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwbp8" event={"ID":"37d6de86-1607-4b21-88bd-6c8962929ce1","Type":"ContainerStarted","Data":"3ea995d15335ef0b8aa9da9841c368a8bd3591fb7c3ea01d94ee22acc2c3d0fb"} Jun 06 10:53:17 crc kubenswrapper[4911]: I0606 10:53:17.994189 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wwbp8" podStartSLOduration=4.369821959 podStartE2EDuration="6.994160885s" podCreationTimestamp="2025-06-06 10:53:11 +0000 UTC" firstStartedPulling="2025-06-06 10:53:13.859921436 +0000 UTC m=+6005.135346979" lastFinishedPulling="2025-06-06 10:53:16.484260362 +0000 UTC m=+6007.759685905" observedRunningTime="2025-06-06 10:53:17.991712041 +0000 UTC m=+6009.267137634" watchObservedRunningTime="2025-06-06 10:53:17.994160885 +0000 UTC m=+6009.269586418" Jun 06 10:53:18 crc kubenswrapper[4911]: I0606 10:53:18.147684 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rtkzz"] Jun 06 10:53:18 crc kubenswrapper[4911]: I0606 10:53:18.945503 4911 generic.go:334] "Generic (PLEG): container finished" podID="8159172f-a989-4f49-a61a-817712278ad5" containerID="eaa06a3f497fa63fcc3105f8b034fa29c390e658602c76c4a7a7c8788672131d" exitCode=0 Jun 06 10:53:18 crc kubenswrapper[4911]: I0606 10:53:18.945894 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtkzz" event={"ID":"8159172f-a989-4f49-a61a-817712278ad5","Type":"ContainerDied","Data":"eaa06a3f497fa63fcc3105f8b034fa29c390e658602c76c4a7a7c8788672131d"} Jun 06 10:53:18 crc kubenswrapper[4911]: I0606 10:53:18.945934 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtkzz" event={"ID":"8159172f-a989-4f49-a61a-817712278ad5","Type":"ContainerStarted","Data":"958af9d948245c422b75391947208c25bf44b48e2f386a256dbba8c493cbd39b"} Jun 06 
10:53:19 crc kubenswrapper[4911]: I0606 10:53:19.960224 4911 generic.go:334] "Generic (PLEG): container finished" podID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" containerID="4d8a38d7c10352f6432658ff5866f614602dcc537ba46638afe74118ceb4aa56" exitCode=0 Jun 06 10:53:19 crc kubenswrapper[4911]: I0606 10:53:19.960830 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2d68j" event={"ID":"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96","Type":"ContainerDied","Data":"4d8a38d7c10352f6432658ff5866f614602dcc537ba46638afe74118ceb4aa56"} Jun 06 10:53:21 crc kubenswrapper[4911]: I0606 10:53:21.968980 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:21 crc kubenswrapper[4911]: I0606 10:53:21.970258 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:21 crc kubenswrapper[4911]: I0606 10:53:21.987727 4911 generic.go:334] "Generic (PLEG): container finished" podID="8159172f-a989-4f49-a61a-817712278ad5" containerID="41ec2301a4e67afdf65ad0ba12159fb72a690aac1732d7504ee2f43dd791ddec" exitCode=0 Jun 06 10:53:21 crc kubenswrapper[4911]: I0606 10:53:21.987832 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtkzz" event={"ID":"8159172f-a989-4f49-a61a-817712278ad5","Type":"ContainerDied","Data":"41ec2301a4e67afdf65ad0ba12159fb72a690aac1732d7504ee2f43dd791ddec"} Jun 06 10:53:21 crc kubenswrapper[4911]: I0606 10:53:21.992303 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2d68j" event={"ID":"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96","Type":"ContainerStarted","Data":"2029d59d04e5fbc97a9e90e810fd0596e485d173465cf8115540b6c200cf5547"} Jun 06 10:53:22 crc kubenswrapper[4911]: I0606 10:53:22.030349 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:22 crc kubenswrapper[4911]: I0606 10:53:22.045588 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2d68j" podStartSLOduration=3.061078231 podStartE2EDuration="6.045555764s" podCreationTimestamp="2025-06-06 10:53:16 +0000 UTC" firstStartedPulling="2025-06-06 10:53:17.931854626 +0000 UTC m=+6009.207280189" lastFinishedPulling="2025-06-06 10:53:20.916332179 +0000 UTC m=+6012.191757722" observedRunningTime="2025-06-06 10:53:22.03917365 +0000 UTC m=+6013.314599233" watchObservedRunningTime="2025-06-06 10:53:22.045555764 +0000 UTC m=+6013.320981317" Jun 06 10:53:23 crc kubenswrapper[4911]: I0606 10:53:23.064519 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:26 crc kubenswrapper[4911]: I0606 10:53:26.016850 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wwbp8"] Jun 06 10:53:26 crc kubenswrapper[4911]: I0606 10:53:26.017619 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wwbp8" podUID="37d6de86-1607-4b21-88bd-6c8962929ce1" containerName="registry-server" containerID="cri-o://3ea995d15335ef0b8aa9da9841c368a8bd3591fb7c3ea01d94ee22acc2c3d0fb" gracePeriod=2 Jun 06 10:53:26 crc kubenswrapper[4911]: I0606 10:53:26.777348 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:26 crc kubenswrapper[4911]: I0606 10:53:26.778374 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:26 crc kubenswrapper[4911]: I0606 10:53:26.856666 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.048287 4911 generic.go:334] "Generic (PLEG): container finished" podID="37d6de86-1607-4b21-88bd-6c8962929ce1" containerID="3ea995d15335ef0b8aa9da9841c368a8bd3591fb7c3ea01d94ee22acc2c3d0fb" exitCode=0 Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.048383 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwbp8" event={"ID":"37d6de86-1607-4b21-88bd-6c8962929ce1","Type":"ContainerDied","Data":"3ea995d15335ef0b8aa9da9841c368a8bd3591fb7c3ea01d94ee22acc2c3d0fb"} Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.052701 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtkzz" event={"ID":"8159172f-a989-4f49-a61a-817712278ad5","Type":"ContainerStarted","Data":"cefeff3ec081f8f7974a5a0242f2b5ce13283f2c153b18a2af5b4094cb66295b"} Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.076118 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rtkzz" podStartSLOduration=2.474222336 podStartE2EDuration="10.076075847s" podCreationTimestamp="2025-06-06 10:53:17 +0000 UTC" firstStartedPulling="2025-06-06 10:53:18.963589086 +0000 UTC m=+6010.239014629" lastFinishedPulling="2025-06-06 10:53:26.565442607 +0000 UTC m=+6017.840868140" observedRunningTime="2025-06-06 10:53:27.075653357 +0000 UTC m=+6018.351078900" watchObservedRunningTime="2025-06-06 10:53:27.076075847 +0000 UTC m=+6018.351501390" Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.103079 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.376318 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.376398 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.823264 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.948276 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hv86\" (UniqueName: \"kubernetes.io/projected/37d6de86-1607-4b21-88bd-6c8962929ce1-kube-api-access-7hv86\") pod \"37d6de86-1607-4b21-88bd-6c8962929ce1\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.948382 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-catalog-content\") pod \"37d6de86-1607-4b21-88bd-6c8962929ce1\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.948601 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-utilities\") pod \"37d6de86-1607-4b21-88bd-6c8962929ce1\" (UID: \"37d6de86-1607-4b21-88bd-6c8962929ce1\") " Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.949935 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-utilities" (OuterVolumeSpecName: "utilities") pod "37d6de86-1607-4b21-88bd-6c8962929ce1" (UID: "37d6de86-1607-4b21-88bd-6c8962929ce1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.950441 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.957034 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37d6de86-1607-4b21-88bd-6c8962929ce1-kube-api-access-7hv86" (OuterVolumeSpecName: "kube-api-access-7hv86") pod "37d6de86-1607-4b21-88bd-6c8962929ce1" (UID: "37d6de86-1607-4b21-88bd-6c8962929ce1"). InnerVolumeSpecName "kube-api-access-7hv86". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:53:27 crc kubenswrapper[4911]: I0606 10:53:27.980541 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37d6de86-1607-4b21-88bd-6c8962929ce1" (UID: "37d6de86-1607-4b21-88bd-6c8962929ce1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:53:28 crc kubenswrapper[4911]: I0606 10:53:28.052319 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hv86\" (UniqueName: \"kubernetes.io/projected/37d6de86-1607-4b21-88bd-6c8962929ce1-kube-api-access-7hv86\") on node \"crc\" DevicePath \"\"" Jun 06 10:53:28 crc kubenswrapper[4911]: I0606 10:53:28.052350 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37d6de86-1607-4b21-88bd-6c8962929ce1-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:53:28 crc kubenswrapper[4911]: I0606 10:53:28.063891 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwbp8" event={"ID":"37d6de86-1607-4b21-88bd-6c8962929ce1","Type":"ContainerDied","Data":"a577b341b7184d0cf00919cad674ee439737ab242a972e88def2fcd5cfb4a39f"} Jun 06 10:53:28 crc kubenswrapper[4911]: I0606 10:53:28.063960 4911 scope.go:117] "RemoveContainer" containerID="3ea995d15335ef0b8aa9da9841c368a8bd3591fb7c3ea01d94ee22acc2c3d0fb" Jun 06 10:53:28 crc kubenswrapper[4911]: I0606 10:53:28.063988 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wwbp8" Jun 06 10:53:28 crc kubenswrapper[4911]: I0606 10:53:28.095231 4911 scope.go:117] "RemoveContainer" containerID="c3a163c52cc055a9fe657408359fd92ea640fd3f96a267d9fee6628dc42d1e57" Jun 06 10:53:28 crc kubenswrapper[4911]: I0606 10:53:28.100184 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wwbp8"] Jun 06 10:53:28 crc kubenswrapper[4911]: I0606 10:53:28.110909 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wwbp8"] Jun 06 10:53:28 crc kubenswrapper[4911]: I0606 10:53:28.124304 4911 scope.go:117] "RemoveContainer" containerID="2fbbc1bbb41baf95021246bd05160c84d5e6053a83f1b8f9cfc4a97b09c41621" Jun 06 10:53:28 crc kubenswrapper[4911]: I0606 10:53:28.435620 4911 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rtkzz" podUID="8159172f-a989-4f49-a61a-817712278ad5" containerName="registry-server" probeResult="failure" output=< Jun 06 10:53:28 crc kubenswrapper[4911]: timeout: failed to connect service ":50051" within 1s Jun 06 10:53:28 crc kubenswrapper[4911]: > Jun 06 10:53:29 crc kubenswrapper[4911]: I0606 10:53:29.219738 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2d68j"] Jun 06 10:53:29 crc kubenswrapper[4911]: I0606 10:53:29.962970 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37d6de86-1607-4b21-88bd-6c8962929ce1" path="/var/lib/kubelet/pods/37d6de86-1607-4b21-88bd-6c8962929ce1/volumes" Jun 06 10:53:30 crc kubenswrapper[4911]: I0606 10:53:30.083172 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2d68j" podUID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" containerName="registry-server" containerID="cri-o://2029d59d04e5fbc97a9e90e810fd0596e485d173465cf8115540b6c200cf5547" gracePeriod=2 Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.099906 4911 generic.go:334] "Generic (PLEG): container finished" podID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" containerID="2029d59d04e5fbc97a9e90e810fd0596e485d173465cf8115540b6c200cf5547" exitCode=0 Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.100598 4911 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2d68j" event={"ID":"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96","Type":"ContainerDied","Data":"2029d59d04e5fbc97a9e90e810fd0596e485d173465cf8115540b6c200cf5547"} Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.558006 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.630708 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-utilities\") pod \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.630764 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-catalog-content\") pod \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.630823 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6c9c\" (UniqueName: \"kubernetes.io/projected/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-kube-api-access-q6c9c\") pod \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\" (UID: \"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96\") " Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.632829 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-utilities" (OuterVolumeSpecName: "utilities") pod "b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" (UID: "b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.640544 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-kube-api-access-q6c9c" (OuterVolumeSpecName: "kube-api-access-q6c9c") pod "b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" (UID: "b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96"). InnerVolumeSpecName "kube-api-access-q6c9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.642086 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" (UID: "b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.733559 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.734023 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:53:31 crc kubenswrapper[4911]: I0606 10:53:31.734038 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6c9c\" (UniqueName: \"kubernetes.io/projected/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96-kube-api-access-q6c9c\") on node \"crc\" DevicePath \"\"" Jun 06 10:53:32 crc kubenswrapper[4911]: I0606 10:53:32.114518 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2d68j" event={"ID":"b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96","Type":"ContainerDied","Data":"67e54b5b10c9595559a081e6d33be2da7a4424632ebbbf4e46752fb35518a1b0"} Jun 06 10:53:32 crc kubenswrapper[4911]: I0606 10:53:32.114594 4911 scope.go:117] "RemoveContainer" containerID="2029d59d04e5fbc97a9e90e810fd0596e485d173465cf8115540b6c200cf5547" Jun 06 10:53:32 crc kubenswrapper[4911]: I0606 10:53:32.115338 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2d68j" Jun 06 10:53:32 crc kubenswrapper[4911]: I0606 10:53:32.140166 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2d68j"] Jun 06 10:53:32 crc kubenswrapper[4911]: I0606 10:53:32.141056 4911 scope.go:117] "RemoveContainer" containerID="4d8a38d7c10352f6432658ff5866f614602dcc537ba46638afe74118ceb4aa56" Jun 06 10:53:32 crc kubenswrapper[4911]: I0606 10:53:32.148532 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2d68j"] Jun 06 10:53:32 crc kubenswrapper[4911]: I0606 10:53:32.164979 4911 scope.go:117] "RemoveContainer" containerID="bdcbab9065682cff266740e656b290cc01f3c5c029c4b07c8d6cf077deb5d907" Jun 06 10:53:33 crc kubenswrapper[4911]: I0606 10:53:33.961516 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" path="/var/lib/kubelet/pods/b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96/volumes" Jun 06 10:53:37 crc kubenswrapper[4911]: I0606 10:53:37.440119 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:37 crc kubenswrapper[4911]: I0606 10:53:37.506264 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:37 crc kubenswrapper[4911]: I0606 10:53:37.703859 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rtkzz"] Jun 06 10:53:39 crc kubenswrapper[4911]: I0606 10:53:39.196251 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rtkzz" podUID="8159172f-a989-4f49-a61a-817712278ad5" containerName="registry-server" containerID="cri-o://cefeff3ec081f8f7974a5a0242f2b5ce13283f2c153b18a2af5b4094cb66295b" gracePeriod=2 Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.216491 4911 generic.go:334] "Generic (PLEG): container finished" 
podID="8159172f-a989-4f49-a61a-817712278ad5" containerID="cefeff3ec081f8f7974a5a0242f2b5ce13283f2c153b18a2af5b4094cb66295b" exitCode=0 Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.216562 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtkzz" event={"ID":"8159172f-a989-4f49-a61a-817712278ad5","Type":"ContainerDied","Data":"cefeff3ec081f8f7974a5a0242f2b5ce13283f2c153b18a2af5b4094cb66295b"} Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.464657 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.542421 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-utilities\") pod \"8159172f-a989-4f49-a61a-817712278ad5\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.542574 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-catalog-content\") pod \"8159172f-a989-4f49-a61a-817712278ad5\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.542748 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4z2r\" (UniqueName: \"kubernetes.io/projected/8159172f-a989-4f49-a61a-817712278ad5-kube-api-access-p4z2r\") pod \"8159172f-a989-4f49-a61a-817712278ad5\" (UID: \"8159172f-a989-4f49-a61a-817712278ad5\") " Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.544501 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-utilities" (OuterVolumeSpecName: "utilities") pod "8159172f-a989-4f49-a61a-817712278ad5" (UID: "8159172f-a989-4f49-a61a-817712278ad5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.550859 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8159172f-a989-4f49-a61a-817712278ad5-kube-api-access-p4z2r" (OuterVolumeSpecName: "kube-api-access-p4z2r") pod "8159172f-a989-4f49-a61a-817712278ad5" (UID: "8159172f-a989-4f49-a61a-817712278ad5"). InnerVolumeSpecName "kube-api-access-p4z2r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.605237 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8159172f-a989-4f49-a61a-817712278ad5" (UID: "8159172f-a989-4f49-a61a-817712278ad5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.645752 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4z2r\" (UniqueName: \"kubernetes.io/projected/8159172f-a989-4f49-a61a-817712278ad5-kube-api-access-p4z2r\") on node \"crc\" DevicePath \"\"" Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.645814 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:53:40 crc kubenswrapper[4911]: I0606 10:53:40.645834 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8159172f-a989-4f49-a61a-817712278ad5-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:53:41 crc kubenswrapper[4911]: I0606 10:53:41.230075 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtkzz" event={"ID":"8159172f-a989-4f49-a61a-817712278ad5","Type":"ContainerDied","Data":"958af9d948245c422b75391947208c25bf44b48e2f386a256dbba8c493cbd39b"} Jun 06 10:53:41 crc kubenswrapper[4911]: I0606 10:53:41.230230 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rtkzz" Jun 06 10:53:41 crc kubenswrapper[4911]: I0606 10:53:41.230388 4911 scope.go:117] "RemoveContainer" containerID="cefeff3ec081f8f7974a5a0242f2b5ce13283f2c153b18a2af5b4094cb66295b" Jun 06 10:53:41 crc kubenswrapper[4911]: I0606 10:53:41.255352 4911 scope.go:117] "RemoveContainer" containerID="41ec2301a4e67afdf65ad0ba12159fb72a690aac1732d7504ee2f43dd791ddec" Jun 06 10:53:41 crc kubenswrapper[4911]: I0606 10:53:41.278409 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rtkzz"] Jun 06 10:53:41 crc kubenswrapper[4911]: I0606 10:53:41.289208 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rtkzz"] Jun 06 10:53:41 crc kubenswrapper[4911]: I0606 10:53:41.299396 4911 scope.go:117] "RemoveContainer" containerID="eaa06a3f497fa63fcc3105f8b034fa29c390e658602c76c4a7a7c8788672131d" Jun 06 10:53:41 crc kubenswrapper[4911]: I0606 10:53:41.963075 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8159172f-a989-4f49-a61a-817712278ad5" path="/var/lib/kubelet/pods/8159172f-a989-4f49-a61a-817712278ad5/volumes" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.550987 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-t5jbp"] Jun 06 10:54:01 crc kubenswrapper[4911]: E0606 10:54:01.552274 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8159172f-a989-4f49-a61a-817712278ad5" containerName="extract-content" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552290 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8159172f-a989-4f49-a61a-817712278ad5" containerName="extract-content" Jun 06 10:54:01 crc kubenswrapper[4911]: E0606 10:54:01.552311 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37d6de86-1607-4b21-88bd-6c8962929ce1" containerName="extract-content" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552317 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="37d6de86-1607-4b21-88bd-6c8962929ce1" containerName="extract-content" Jun 06 10:54:01 crc kubenswrapper[4911]: E0606 10:54:01.552327 4911 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" containerName="extract-content" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552333 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" containerName="extract-content" Jun 06 10:54:01 crc kubenswrapper[4911]: E0606 10:54:01.552349 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8159172f-a989-4f49-a61a-817712278ad5" containerName="registry-server" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552356 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8159172f-a989-4f49-a61a-817712278ad5" containerName="registry-server" Jun 06 10:54:01 crc kubenswrapper[4911]: E0606 10:54:01.552364 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" containerName="extract-utilities" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552372 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" containerName="extract-utilities" Jun 06 10:54:01 crc kubenswrapper[4911]: E0606 10:54:01.552381 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37d6de86-1607-4b21-88bd-6c8962929ce1" containerName="extract-utilities" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552437 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="37d6de86-1607-4b21-88bd-6c8962929ce1" containerName="extract-utilities" Jun 06 10:54:01 crc kubenswrapper[4911]: E0606 10:54:01.552448 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37d6de86-1607-4b21-88bd-6c8962929ce1" containerName="registry-server" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552454 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="37d6de86-1607-4b21-88bd-6c8962929ce1" containerName="registry-server" Jun 06 10:54:01 crc kubenswrapper[4911]: E0606 10:54:01.552473 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8159172f-a989-4f49-a61a-817712278ad5" containerName="extract-utilities" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552481 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8159172f-a989-4f49-a61a-817712278ad5" containerName="extract-utilities" Jun 06 10:54:01 crc kubenswrapper[4911]: E0606 10:54:01.552519 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" containerName="registry-server" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552526 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" containerName="registry-server" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552775 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="37d6de86-1607-4b21-88bd-6c8962929ce1" containerName="registry-server" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552790 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4cd95b1-3ef4-4d45-b0d4-9b80e71d2d96" containerName="registry-server" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.552830 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="8159172f-a989-4f49-a61a-817712278ad5" containerName="registry-server" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.553930 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-t5jbp" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.684836 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fb026013-e91d-4184-91d3-c781d53399a0-host\") pod \"crc-debug-t5jbp\" (UID: \"fb026013-e91d-4184-91d3-c781d53399a0\") " pod="openstack/crc-debug-t5jbp" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.684905 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqfjw\" (UniqueName: \"kubernetes.io/projected/fb026013-e91d-4184-91d3-c781d53399a0-kube-api-access-dqfjw\") pod \"crc-debug-t5jbp\" (UID: \"fb026013-e91d-4184-91d3-c781d53399a0\") " pod="openstack/crc-debug-t5jbp" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.787137 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fb026013-e91d-4184-91d3-c781d53399a0-host\") pod \"crc-debug-t5jbp\" (UID: \"fb026013-e91d-4184-91d3-c781d53399a0\") " pod="openstack/crc-debug-t5jbp" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.787202 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqfjw\" (UniqueName: \"kubernetes.io/projected/fb026013-e91d-4184-91d3-c781d53399a0-kube-api-access-dqfjw\") pod \"crc-debug-t5jbp\" (UID: \"fb026013-e91d-4184-91d3-c781d53399a0\") " pod="openstack/crc-debug-t5jbp" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.787286 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fb026013-e91d-4184-91d3-c781d53399a0-host\") pod \"crc-debug-t5jbp\" (UID: \"fb026013-e91d-4184-91d3-c781d53399a0\") " pod="openstack/crc-debug-t5jbp" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.811928 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqfjw\" (UniqueName: \"kubernetes.io/projected/fb026013-e91d-4184-91d3-c781d53399a0-kube-api-access-dqfjw\") pod \"crc-debug-t5jbp\" (UID: \"fb026013-e91d-4184-91d3-c781d53399a0\") " pod="openstack/crc-debug-t5jbp" Jun 06 10:54:01 crc kubenswrapper[4911]: I0606 10:54:01.878658 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-t5jbp" Jun 06 10:54:02 crc kubenswrapper[4911]: I0606 10:54:02.458454 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-t5jbp" event={"ID":"fb026013-e91d-4184-91d3-c781d53399a0","Type":"ContainerStarted","Data":"e90b2a1fc9ffe84c34d5c9cf9e779022fb38b1659e76afc938e577858cb7920e"} Jun 06 10:54:03 crc kubenswrapper[4911]: I0606 10:54:03.475540 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-t5jbp" event={"ID":"fb026013-e91d-4184-91d3-c781d53399a0","Type":"ContainerStarted","Data":"4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b"} Jun 06 10:54:03 crc kubenswrapper[4911]: I0606 10:54:03.496384 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-t5jbp" podStartSLOduration=2.496355683 podStartE2EDuration="2.496355683s" podCreationTimestamp="2025-06-06 10:54:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:54:03.489246279 +0000 UTC m=+6054.764671822" watchObservedRunningTime="2025-06-06 10:54:03.496355683 +0000 UTC m=+6054.771781226" Jun 06 10:54:12 crc kubenswrapper[4911]: I0606 10:54:12.905113 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-t5jbp"] Jun 06 10:54:12 crc kubenswrapper[4911]: I0606 10:54:12.906186 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-t5jbp" podUID="fb026013-e91d-4184-91d3-c781d53399a0" containerName="container-00" containerID="cri-o://4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b" gracePeriod=2 Jun 06 10:54:12 crc kubenswrapper[4911]: I0606 10:54:12.916517 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-t5jbp"] Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.004923 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-t5jbp" Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.163803 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fb026013-e91d-4184-91d3-c781d53399a0-host\") pod \"fb026013-e91d-4184-91d3-c781d53399a0\" (UID: \"fb026013-e91d-4184-91d3-c781d53399a0\") " Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.164009 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb026013-e91d-4184-91d3-c781d53399a0-host" (OuterVolumeSpecName: "host") pod "fb026013-e91d-4184-91d3-c781d53399a0" (UID: "fb026013-e91d-4184-91d3-c781d53399a0"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.164052 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqfjw\" (UniqueName: \"kubernetes.io/projected/fb026013-e91d-4184-91d3-c781d53399a0-kube-api-access-dqfjw\") pod \"fb026013-e91d-4184-91d3-c781d53399a0\" (UID: \"fb026013-e91d-4184-91d3-c781d53399a0\") " Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.164988 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fb026013-e91d-4184-91d3-c781d53399a0-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.174050 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb026013-e91d-4184-91d3-c781d53399a0-kube-api-access-dqfjw" (OuterVolumeSpecName: "kube-api-access-dqfjw") pod "fb026013-e91d-4184-91d3-c781d53399a0" (UID: "fb026013-e91d-4184-91d3-c781d53399a0"). InnerVolumeSpecName "kube-api-access-dqfjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.267079 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqfjw\" (UniqueName: \"kubernetes.io/projected/fb026013-e91d-4184-91d3-c781d53399a0-kube-api-access-dqfjw\") on node \"crc\" DevicePath \"\"" Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.571228 4911 generic.go:334] "Generic (PLEG): container finished" podID="fb026013-e91d-4184-91d3-c781d53399a0" containerID="4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b" exitCode=0 Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.571296 4911 scope.go:117] "RemoveContainer" containerID="4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b" Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.571406 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-t5jbp" Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.617993 4911 scope.go:117] "RemoveContainer" containerID="4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b" Jun 06 10:54:13 crc kubenswrapper[4911]: E0606 10:54:13.618901 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b\": container with ID starting with 4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b not found: ID does not exist" containerID="4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b" Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.618941 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b"} err="failed to get container status \"4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b\": rpc error: code = NotFound desc = could not find container \"4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b\": container with ID starting with 4340f112b742dd6b010bf921b418ee89179119dab44617c6731d1c963ea76b4b not found: ID does not exist" Jun 06 10:54:13 crc kubenswrapper[4911]: I0606 10:54:13.963136 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb026013-e91d-4184-91d3-c781d53399a0" path="/var/lib/kubelet/pods/fb026013-e91d-4184-91d3-c781d53399a0/volumes" Jun 06 10:54:24 crc kubenswrapper[4911]: I0606 10:54:24.484264 4911 scope.go:117] "RemoveContainer" containerID="aecee8523b44ec6a4cae0722d0ba54573106f875d0bdbbde035c453b87165991" Jun 06 10:55:02 crc kubenswrapper[4911]: I0606 10:55:02.314728 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-2nm4p"] Jun 06 10:55:02 crc kubenswrapper[4911]: E0606 10:55:02.316299 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb026013-e91d-4184-91d3-c781d53399a0" containerName="container-00" Jun 06 10:55:02 crc kubenswrapper[4911]: I0606 10:55:02.316327 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb026013-e91d-4184-91d3-c781d53399a0" containerName="container-00" Jun 06 10:55:02 crc kubenswrapper[4911]: I0606 10:55:02.316619 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb026013-e91d-4184-91d3-c781d53399a0" containerName="container-00" Jun 06 10:55:02 crc kubenswrapper[4911]: I0606 10:55:02.317808 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2nm4p" Jun 06 10:55:02 crc kubenswrapper[4911]: I0606 10:55:02.491458 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx58k\" (UniqueName: \"kubernetes.io/projected/243a139f-f759-49a1-aa11-42f3d3dfc9e6-kube-api-access-hx58k\") pod \"crc-debug-2nm4p\" (UID: \"243a139f-f759-49a1-aa11-42f3d3dfc9e6\") " pod="openstack/crc-debug-2nm4p" Jun 06 10:55:02 crc kubenswrapper[4911]: I0606 10:55:02.491559 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/243a139f-f759-49a1-aa11-42f3d3dfc9e6-host\") pod \"crc-debug-2nm4p\" (UID: \"243a139f-f759-49a1-aa11-42f3d3dfc9e6\") " pod="openstack/crc-debug-2nm4p" Jun 06 10:55:02 crc kubenswrapper[4911]: I0606 10:55:02.594309 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx58k\" (UniqueName: \"kubernetes.io/projected/243a139f-f759-49a1-aa11-42f3d3dfc9e6-kube-api-access-hx58k\") pod \"crc-debug-2nm4p\" (UID: \"243a139f-f759-49a1-aa11-42f3d3dfc9e6\") " pod="openstack/crc-debug-2nm4p" Jun 06 10:55:02 crc kubenswrapper[4911]: I0606 10:55:02.594406 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/243a139f-f759-49a1-aa11-42f3d3dfc9e6-host\") pod \"crc-debug-2nm4p\" (UID: \"243a139f-f759-49a1-aa11-42f3d3dfc9e6\") " pod="openstack/crc-debug-2nm4p" Jun 06 10:55:02 crc kubenswrapper[4911]: I0606 10:55:02.594653 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/243a139f-f759-49a1-aa11-42f3d3dfc9e6-host\") pod \"crc-debug-2nm4p\" (UID: \"243a139f-f759-49a1-aa11-42f3d3dfc9e6\") " pod="openstack/crc-debug-2nm4p" Jun 06 10:55:02 crc kubenswrapper[4911]: I0606 10:55:02.622183 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx58k\" (UniqueName: \"kubernetes.io/projected/243a139f-f759-49a1-aa11-42f3d3dfc9e6-kube-api-access-hx58k\") pod \"crc-debug-2nm4p\" (UID: \"243a139f-f759-49a1-aa11-42f3d3dfc9e6\") " pod="openstack/crc-debug-2nm4p" Jun 06 10:55:02 crc kubenswrapper[4911]: I0606 10:55:02.659541 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2nm4p" Jun 06 10:55:03 crc kubenswrapper[4911]: I0606 10:55:03.109329 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-2nm4p" event={"ID":"243a139f-f759-49a1-aa11-42f3d3dfc9e6","Type":"ContainerStarted","Data":"67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a"} Jun 06 10:55:03 crc kubenswrapper[4911]: I0606 10:55:03.109724 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-2nm4p" event={"ID":"243a139f-f759-49a1-aa11-42f3d3dfc9e6","Type":"ContainerStarted","Data":"59c046a6fc175551f050b2253e2fb4a73b8ff3a0203af1f82591a9fe42ca5105"} Jun 06 10:55:03 crc kubenswrapper[4911]: I0606 10:55:03.147153 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-2nm4p" podStartSLOduration=1.147080918 podStartE2EDuration="1.147080918s" podCreationTimestamp="2025-06-06 10:55:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:55:03.125783558 +0000 UTC m=+6114.401209101" watchObservedRunningTime="2025-06-06 10:55:03.147080918 +0000 UTC m=+6114.422506471" Jun 06 10:55:13 crc kubenswrapper[4911]: I0606 10:55:13.345706 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-2nm4p"] Jun 06 10:55:13 crc kubenswrapper[4911]: I0606 10:55:13.347440 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-2nm4p" podUID="243a139f-f759-49a1-aa11-42f3d3dfc9e6" containerName="container-00" containerID="cri-o://67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a" gracePeriod=2 Jun 06 10:55:13 crc kubenswrapper[4911]: I0606 10:55:13.355052 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-2nm4p"] Jun 06 10:55:13 crc kubenswrapper[4911]: I0606 10:55:13.465151 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-2nm4p" Jun 06 10:55:13 crc kubenswrapper[4911]: I0606 10:55:13.646999 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/243a139f-f759-49a1-aa11-42f3d3dfc9e6-host\") pod \"243a139f-f759-49a1-aa11-42f3d3dfc9e6\" (UID: \"243a139f-f759-49a1-aa11-42f3d3dfc9e6\") " Jun 06 10:55:13 crc kubenswrapper[4911]: I0606 10:55:13.647124 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/243a139f-f759-49a1-aa11-42f3d3dfc9e6-host" (OuterVolumeSpecName: "host") pod "243a139f-f759-49a1-aa11-42f3d3dfc9e6" (UID: "243a139f-f759-49a1-aa11-42f3d3dfc9e6"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:55:13 crc kubenswrapper[4911]: I0606 10:55:13.647198 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hx58k\" (UniqueName: \"kubernetes.io/projected/243a139f-f759-49a1-aa11-42f3d3dfc9e6-kube-api-access-hx58k\") pod \"243a139f-f759-49a1-aa11-42f3d3dfc9e6\" (UID: \"243a139f-f759-49a1-aa11-42f3d3dfc9e6\") " Jun 06 10:55:13 crc kubenswrapper[4911]: I0606 10:55:13.647609 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/243a139f-f759-49a1-aa11-42f3d3dfc9e6-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:55:13 crc kubenswrapper[4911]: I0606 10:55:13.654231 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/243a139f-f759-49a1-aa11-42f3d3dfc9e6-kube-api-access-hx58k" (OuterVolumeSpecName: "kube-api-access-hx58k") pod "243a139f-f759-49a1-aa11-42f3d3dfc9e6" (UID: "243a139f-f759-49a1-aa11-42f3d3dfc9e6"). InnerVolumeSpecName "kube-api-access-hx58k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:55:13 crc kubenswrapper[4911]: I0606 10:55:13.749469 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hx58k\" (UniqueName: \"kubernetes.io/projected/243a139f-f759-49a1-aa11-42f3d3dfc9e6-kube-api-access-hx58k\") on node \"crc\" DevicePath \"\"" Jun 06 10:55:13 crc kubenswrapper[4911]: I0606 10:55:13.966923 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="243a139f-f759-49a1-aa11-42f3d3dfc9e6" path="/var/lib/kubelet/pods/243a139f-f759-49a1-aa11-42f3d3dfc9e6/volumes" Jun 06 10:55:14 crc kubenswrapper[4911]: I0606 10:55:14.219227 4911 generic.go:334] "Generic (PLEG): container finished" podID="243a139f-f759-49a1-aa11-42f3d3dfc9e6" containerID="67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a" exitCode=0 Jun 06 10:55:14 crc kubenswrapper[4911]: I0606 10:55:14.219285 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-2nm4p" Jun 06 10:55:14 crc kubenswrapper[4911]: I0606 10:55:14.219353 4911 scope.go:117] "RemoveContainer" containerID="67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a" Jun 06 10:55:14 crc kubenswrapper[4911]: I0606 10:55:14.244051 4911 scope.go:117] "RemoveContainer" containerID="67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a" Jun 06 10:55:14 crc kubenswrapper[4911]: E0606 10:55:14.244593 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a\": container with ID starting with 67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a not found: ID does not exist" containerID="67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a" Jun 06 10:55:14 crc kubenswrapper[4911]: I0606 10:55:14.244649 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a"} err="failed to get container status \"67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a\": rpc error: code = NotFound desc = could not find container \"67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a\": container with ID starting with 67d38bb1fcfda807a731edce523190d3e493d88b3ebfe35396f50b54456eff3a not found: ID does not exist" Jun 06 10:55:24 crc kubenswrapper[4911]: I0606 10:55:24.300893 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:55:24 crc kubenswrapper[4911]: I0606 10:55:24.301463 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:55:54 crc kubenswrapper[4911]: I0606 10:55:54.300824 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:55:54 crc kubenswrapper[4911]: I0606 10:55:54.301518 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:56:01 crc kubenswrapper[4911]: I0606 10:56:01.767837 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-6fvsb"] Jun 06 10:56:01 crc kubenswrapper[4911]: E0606 10:56:01.769182 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="243a139f-f759-49a1-aa11-42f3d3dfc9e6" containerName="container-00" Jun 06 10:56:01 crc kubenswrapper[4911]: I0606 10:56:01.769200 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="243a139f-f759-49a1-aa11-42f3d3dfc9e6" containerName="container-00" Jun 06 10:56:01 crc kubenswrapper[4911]: I0606 
10:56:01.769417 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="243a139f-f759-49a1-aa11-42f3d3dfc9e6" containerName="container-00" Jun 06 10:56:01 crc kubenswrapper[4911]: I0606 10:56:01.770156 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-6fvsb" Jun 06 10:56:01 crc kubenswrapper[4911]: I0606 10:56:01.843770 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzt6n\" (UniqueName: \"kubernetes.io/projected/edfff1e3-e91f-4e9e-8894-b582e7cda56d-kube-api-access-vzt6n\") pod \"crc-debug-6fvsb\" (UID: \"edfff1e3-e91f-4e9e-8894-b582e7cda56d\") " pod="openstack/crc-debug-6fvsb" Jun 06 10:56:01 crc kubenswrapper[4911]: I0606 10:56:01.843900 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/edfff1e3-e91f-4e9e-8894-b582e7cda56d-host\") pod \"crc-debug-6fvsb\" (UID: \"edfff1e3-e91f-4e9e-8894-b582e7cda56d\") " pod="openstack/crc-debug-6fvsb" Jun 06 10:56:01 crc kubenswrapper[4911]: I0606 10:56:01.945854 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzt6n\" (UniqueName: \"kubernetes.io/projected/edfff1e3-e91f-4e9e-8894-b582e7cda56d-kube-api-access-vzt6n\") pod \"crc-debug-6fvsb\" (UID: \"edfff1e3-e91f-4e9e-8894-b582e7cda56d\") " pod="openstack/crc-debug-6fvsb" Jun 06 10:56:01 crc kubenswrapper[4911]: I0606 10:56:01.945969 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/edfff1e3-e91f-4e9e-8894-b582e7cda56d-host\") pod \"crc-debug-6fvsb\" (UID: \"edfff1e3-e91f-4e9e-8894-b582e7cda56d\") " pod="openstack/crc-debug-6fvsb" Jun 06 10:56:01 crc kubenswrapper[4911]: I0606 10:56:01.946110 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/edfff1e3-e91f-4e9e-8894-b582e7cda56d-host\") pod \"crc-debug-6fvsb\" (UID: \"edfff1e3-e91f-4e9e-8894-b582e7cda56d\") " pod="openstack/crc-debug-6fvsb" Jun 06 10:56:01 crc kubenswrapper[4911]: I0606 10:56:01.972593 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzt6n\" (UniqueName: \"kubernetes.io/projected/edfff1e3-e91f-4e9e-8894-b582e7cda56d-kube-api-access-vzt6n\") pod \"crc-debug-6fvsb\" (UID: \"edfff1e3-e91f-4e9e-8894-b582e7cda56d\") " pod="openstack/crc-debug-6fvsb" Jun 06 10:56:02 crc kubenswrapper[4911]: I0606 10:56:02.093378 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-6fvsb" Jun 06 10:56:02 crc kubenswrapper[4911]: I0606 10:56:02.732429 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-6fvsb" event={"ID":"edfff1e3-e91f-4e9e-8894-b582e7cda56d","Type":"ContainerStarted","Data":"6ebba9e7c12b151a4fe5d8d737e9cecf025b739f2823198c2d48b3bc52b6b77a"} Jun 06 10:56:02 crc kubenswrapper[4911]: I0606 10:56:02.732947 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-6fvsb" event={"ID":"edfff1e3-e91f-4e9e-8894-b582e7cda56d","Type":"ContainerStarted","Data":"dd0812d9b7524c88f2f82790fdd670ce6e173b28f64b3708dbe2fbc9c61f2bcd"} Jun 06 10:56:02 crc kubenswrapper[4911]: I0606 10:56:02.752681 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-6fvsb" podStartSLOduration=1.75265944 podStartE2EDuration="1.75265944s" podCreationTimestamp="2025-06-06 10:56:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:56:02.746940532 +0000 UTC m=+6174.022366075" watchObservedRunningTime="2025-06-06 10:56:02.75265944 +0000 UTC m=+6174.028084983" Jun 06 10:56:12 crc kubenswrapper[4911]: I0606 10:56:12.742504 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-6fvsb"] Jun 06 10:56:12 crc kubenswrapper[4911]: I0606 10:56:12.743814 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-6fvsb" podUID="edfff1e3-e91f-4e9e-8894-b582e7cda56d" containerName="container-00" containerID="cri-o://6ebba9e7c12b151a4fe5d8d737e9cecf025b739f2823198c2d48b3bc52b6b77a" gracePeriod=2 Jun 06 10:56:12 crc kubenswrapper[4911]: I0606 10:56:12.753912 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-6fvsb"] Jun 06 10:56:12 crc kubenswrapper[4911]: I0606 10:56:12.826532 4911 generic.go:334] "Generic (PLEG): container finished" podID="edfff1e3-e91f-4e9e-8894-b582e7cda56d" containerID="6ebba9e7c12b151a4fe5d8d737e9cecf025b739f2823198c2d48b3bc52b6b77a" exitCode=0 Jun 06 10:56:12 crc kubenswrapper[4911]: I0606 10:56:12.826602 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd0812d9b7524c88f2f82790fdd670ce6e173b28f64b3708dbe2fbc9c61f2bcd" Jun 06 10:56:12 crc kubenswrapper[4911]: I0606 10:56:12.855671 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-6fvsb" Jun 06 10:56:13 crc kubenswrapper[4911]: I0606 10:56:13.006293 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzt6n\" (UniqueName: \"kubernetes.io/projected/edfff1e3-e91f-4e9e-8894-b582e7cda56d-kube-api-access-vzt6n\") pod \"edfff1e3-e91f-4e9e-8894-b582e7cda56d\" (UID: \"edfff1e3-e91f-4e9e-8894-b582e7cda56d\") " Jun 06 10:56:13 crc kubenswrapper[4911]: I0606 10:56:13.006755 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/edfff1e3-e91f-4e9e-8894-b582e7cda56d-host\") pod \"edfff1e3-e91f-4e9e-8894-b582e7cda56d\" (UID: \"edfff1e3-e91f-4e9e-8894-b582e7cda56d\") " Jun 06 10:56:13 crc kubenswrapper[4911]: I0606 10:56:13.006843 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/edfff1e3-e91f-4e9e-8894-b582e7cda56d-host" (OuterVolumeSpecName: "host") pod "edfff1e3-e91f-4e9e-8894-b582e7cda56d" (UID: "edfff1e3-e91f-4e9e-8894-b582e7cda56d"). 
InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:56:13 crc kubenswrapper[4911]: I0606 10:56:13.007496 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/edfff1e3-e91f-4e9e-8894-b582e7cda56d-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:56:13 crc kubenswrapper[4911]: I0606 10:56:13.016085 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edfff1e3-e91f-4e9e-8894-b582e7cda56d-kube-api-access-vzt6n" (OuterVolumeSpecName: "kube-api-access-vzt6n") pod "edfff1e3-e91f-4e9e-8894-b582e7cda56d" (UID: "edfff1e3-e91f-4e9e-8894-b582e7cda56d"). InnerVolumeSpecName "kube-api-access-vzt6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:56:13 crc kubenswrapper[4911]: I0606 10:56:13.110745 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzt6n\" (UniqueName: \"kubernetes.io/projected/edfff1e3-e91f-4e9e-8894-b582e7cda56d-kube-api-access-vzt6n\") on node \"crc\" DevicePath \"\"" Jun 06 10:56:13 crc kubenswrapper[4911]: I0606 10:56:13.834878 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-6fvsb" Jun 06 10:56:13 crc kubenswrapper[4911]: I0606 10:56:13.962535 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edfff1e3-e91f-4e9e-8894-b582e7cda56d" path="/var/lib/kubelet/pods/edfff1e3-e91f-4e9e-8894-b582e7cda56d/volumes" Jun 06 10:56:24 crc kubenswrapper[4911]: I0606 10:56:24.300119 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:56:24 crc kubenswrapper[4911]: I0606 10:56:24.301012 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:56:24 crc kubenswrapper[4911]: I0606 10:56:24.301118 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 10:56:24 crc kubenswrapper[4911]: I0606 10:56:24.302408 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e88856e5e2ab29572a14d0db7f394f9fe00a7119de7168db32b1ebc14063c4a9"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 10:56:24 crc kubenswrapper[4911]: I0606 10:56:24.302495 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://e88856e5e2ab29572a14d0db7f394f9fe00a7119de7168db32b1ebc14063c4a9" gracePeriod=600 Jun 06 10:56:24 crc kubenswrapper[4911]: I0606 10:56:24.943840 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="e88856e5e2ab29572a14d0db7f394f9fe00a7119de7168db32b1ebc14063c4a9" exitCode=0 Jun 06 
10:56:24 crc kubenswrapper[4911]: I0606 10:56:24.944036 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"e88856e5e2ab29572a14d0db7f394f9fe00a7119de7168db32b1ebc14063c4a9"} Jun 06 10:56:24 crc kubenswrapper[4911]: I0606 10:56:24.944290 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6"} Jun 06 10:56:24 crc kubenswrapper[4911]: I0606 10:56:24.944323 4911 scope.go:117] "RemoveContainer" containerID="3af1347581f3a04b88c67852bb1eaebc26b2b71d5ebe8175357524aaccfeeb3c" Jun 06 10:57:02 crc kubenswrapper[4911]: I0606 10:57:02.171306 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-7lnqd"] Jun 06 10:57:02 crc kubenswrapper[4911]: E0606 10:57:02.172205 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edfff1e3-e91f-4e9e-8894-b582e7cda56d" containerName="container-00" Jun 06 10:57:02 crc kubenswrapper[4911]: I0606 10:57:02.172222 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="edfff1e3-e91f-4e9e-8894-b582e7cda56d" containerName="container-00" Jun 06 10:57:02 crc kubenswrapper[4911]: I0606 10:57:02.172458 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="edfff1e3-e91f-4e9e-8894-b582e7cda56d" containerName="container-00" Jun 06 10:57:02 crc kubenswrapper[4911]: I0606 10:57:02.173183 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-7lnqd" Jun 06 10:57:02 crc kubenswrapper[4911]: I0606 10:57:02.261061 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/492476db-dd01-41f7-9d28-f80f350c9bc7-host\") pod \"crc-debug-7lnqd\" (UID: \"492476db-dd01-41f7-9d28-f80f350c9bc7\") " pod="openstack/crc-debug-7lnqd" Jun 06 10:57:02 crc kubenswrapper[4911]: I0606 10:57:02.261338 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-282lt\" (UniqueName: \"kubernetes.io/projected/492476db-dd01-41f7-9d28-f80f350c9bc7-kube-api-access-282lt\") pod \"crc-debug-7lnqd\" (UID: \"492476db-dd01-41f7-9d28-f80f350c9bc7\") " pod="openstack/crc-debug-7lnqd" Jun 06 10:57:02 crc kubenswrapper[4911]: I0606 10:57:02.363409 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/492476db-dd01-41f7-9d28-f80f350c9bc7-host\") pod \"crc-debug-7lnqd\" (UID: \"492476db-dd01-41f7-9d28-f80f350c9bc7\") " pod="openstack/crc-debug-7lnqd" Jun 06 10:57:02 crc kubenswrapper[4911]: I0606 10:57:02.363566 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/492476db-dd01-41f7-9d28-f80f350c9bc7-host\") pod \"crc-debug-7lnqd\" (UID: \"492476db-dd01-41f7-9d28-f80f350c9bc7\") " pod="openstack/crc-debug-7lnqd" Jun 06 10:57:02 crc kubenswrapper[4911]: I0606 10:57:02.363801 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-282lt\" (UniqueName: \"kubernetes.io/projected/492476db-dd01-41f7-9d28-f80f350c9bc7-kube-api-access-282lt\") pod \"crc-debug-7lnqd\" (UID: \"492476db-dd01-41f7-9d28-f80f350c9bc7\") " 
pod="openstack/crc-debug-7lnqd" Jun 06 10:57:02 crc kubenswrapper[4911]: I0606 10:57:02.384176 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-282lt\" (UniqueName: \"kubernetes.io/projected/492476db-dd01-41f7-9d28-f80f350c9bc7-kube-api-access-282lt\") pod \"crc-debug-7lnqd\" (UID: \"492476db-dd01-41f7-9d28-f80f350c9bc7\") " pod="openstack/crc-debug-7lnqd" Jun 06 10:57:02 crc kubenswrapper[4911]: I0606 10:57:02.497789 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-7lnqd" Jun 06 10:57:03 crc kubenswrapper[4911]: I0606 10:57:03.369314 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-7lnqd" event={"ID":"492476db-dd01-41f7-9d28-f80f350c9bc7","Type":"ContainerStarted","Data":"bcf4c6f61067c759c9243dc23a2e5d204d9e50a59946fdafce2220717c3fab11"} Jun 06 10:57:03 crc kubenswrapper[4911]: I0606 10:57:03.369793 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-7lnqd" event={"ID":"492476db-dd01-41f7-9d28-f80f350c9bc7","Type":"ContainerStarted","Data":"37d5b4a0f95b0570ad11df97a3ed0c169bdcdc6d70546de7b0b8b66c2216345a"} Jun 06 10:57:03 crc kubenswrapper[4911]: I0606 10:57:03.394749 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-7lnqd" podStartSLOduration=1.394721505 podStartE2EDuration="1.394721505s" podCreationTimestamp="2025-06-06 10:57:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:57:03.385682932 +0000 UTC m=+6234.661108505" watchObservedRunningTime="2025-06-06 10:57:03.394721505 +0000 UTC m=+6234.670147058" Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.425141 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-7lnqd"] Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.425758 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-7lnqd" podUID="492476db-dd01-41f7-9d28-f80f350c9bc7" containerName="container-00" containerID="cri-o://bcf4c6f61067c759c9243dc23a2e5d204d9e50a59946fdafce2220717c3fab11" gracePeriod=2 Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.436750 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-7lnqd"] Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.473846 4911 generic.go:334] "Generic (PLEG): container finished" podID="492476db-dd01-41f7-9d28-f80f350c9bc7" containerID="bcf4c6f61067c759c9243dc23a2e5d204d9e50a59946fdafce2220717c3fab11" exitCode=0 Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.604016 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-7lnqd" Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.709826 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/492476db-dd01-41f7-9d28-f80f350c9bc7-host\") pod \"492476db-dd01-41f7-9d28-f80f350c9bc7\" (UID: \"492476db-dd01-41f7-9d28-f80f350c9bc7\") " Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.709896 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/492476db-dd01-41f7-9d28-f80f350c9bc7-host" (OuterVolumeSpecName: "host") pod "492476db-dd01-41f7-9d28-f80f350c9bc7" (UID: "492476db-dd01-41f7-9d28-f80f350c9bc7"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.709910 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-282lt\" (UniqueName: \"kubernetes.io/projected/492476db-dd01-41f7-9d28-f80f350c9bc7-kube-api-access-282lt\") pod \"492476db-dd01-41f7-9d28-f80f350c9bc7\" (UID: \"492476db-dd01-41f7-9d28-f80f350c9bc7\") " Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.710500 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/492476db-dd01-41f7-9d28-f80f350c9bc7-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.723748 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/492476db-dd01-41f7-9d28-f80f350c9bc7-kube-api-access-282lt" (OuterVolumeSpecName: "kube-api-access-282lt") pod "492476db-dd01-41f7-9d28-f80f350c9bc7" (UID: "492476db-dd01-41f7-9d28-f80f350c9bc7"). InnerVolumeSpecName "kube-api-access-282lt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.812184 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-282lt\" (UniqueName: \"kubernetes.io/projected/492476db-dd01-41f7-9d28-f80f350c9bc7-kube-api-access-282lt\") on node \"crc\" DevicePath \"\"" Jun 06 10:57:13 crc kubenswrapper[4911]: I0606 10:57:13.958844 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="492476db-dd01-41f7-9d28-f80f350c9bc7" path="/var/lib/kubelet/pods/492476db-dd01-41f7-9d28-f80f350c9bc7/volumes" Jun 06 10:57:14 crc kubenswrapper[4911]: I0606 10:57:14.483184 4911 scope.go:117] "RemoveContainer" containerID="bcf4c6f61067c759c9243dc23a2e5d204d9e50a59946fdafce2220717c3fab11" Jun 06 10:57:14 crc kubenswrapper[4911]: I0606 10:57:14.483228 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-7lnqd" Jun 06 10:58:01 crc kubenswrapper[4911]: I0606 10:58:01.845615 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-r6h6l"] Jun 06 10:58:01 crc kubenswrapper[4911]: E0606 10:58:01.846834 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="492476db-dd01-41f7-9d28-f80f350c9bc7" containerName="container-00" Jun 06 10:58:01 crc kubenswrapper[4911]: I0606 10:58:01.846856 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="492476db-dd01-41f7-9d28-f80f350c9bc7" containerName="container-00" Jun 06 10:58:01 crc kubenswrapper[4911]: I0606 10:58:01.847137 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="492476db-dd01-41f7-9d28-f80f350c9bc7" containerName="container-00" Jun 06 10:58:01 crc kubenswrapper[4911]: I0606 10:58:01.848032 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-r6h6l" Jun 06 10:58:01 crc kubenswrapper[4911]: I0606 10:58:01.981635 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkz7m\" (UniqueName: \"kubernetes.io/projected/7dcafac0-090f-4f91-b6db-74c8b52f74c1-kube-api-access-tkz7m\") pod \"crc-debug-r6h6l\" (UID: \"7dcafac0-090f-4f91-b6db-74c8b52f74c1\") " pod="openstack/crc-debug-r6h6l" Jun 06 10:58:01 crc kubenswrapper[4911]: I0606 10:58:01.982453 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7dcafac0-090f-4f91-b6db-74c8b52f74c1-host\") pod \"crc-debug-r6h6l\" (UID: \"7dcafac0-090f-4f91-b6db-74c8b52f74c1\") " pod="openstack/crc-debug-r6h6l" Jun 06 10:58:02 crc kubenswrapper[4911]: I0606 10:58:02.085857 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7dcafac0-090f-4f91-b6db-74c8b52f74c1-host\") pod \"crc-debug-r6h6l\" (UID: \"7dcafac0-090f-4f91-b6db-74c8b52f74c1\") " pod="openstack/crc-debug-r6h6l" Jun 06 10:58:02 crc kubenswrapper[4911]: I0606 10:58:02.086339 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7dcafac0-090f-4f91-b6db-74c8b52f74c1-host\") pod \"crc-debug-r6h6l\" (UID: \"7dcafac0-090f-4f91-b6db-74c8b52f74c1\") " pod="openstack/crc-debug-r6h6l" Jun 06 10:58:02 crc kubenswrapper[4911]: I0606 10:58:02.086688 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkz7m\" (UniqueName: \"kubernetes.io/projected/7dcafac0-090f-4f91-b6db-74c8b52f74c1-kube-api-access-tkz7m\") pod \"crc-debug-r6h6l\" (UID: \"7dcafac0-090f-4f91-b6db-74c8b52f74c1\") " pod="openstack/crc-debug-r6h6l" Jun 06 10:58:02 crc kubenswrapper[4911]: I0606 10:58:02.112046 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkz7m\" (UniqueName: \"kubernetes.io/projected/7dcafac0-090f-4f91-b6db-74c8b52f74c1-kube-api-access-tkz7m\") pod \"crc-debug-r6h6l\" (UID: \"7dcafac0-090f-4f91-b6db-74c8b52f74c1\") " pod="openstack/crc-debug-r6h6l" Jun 06 10:58:02 crc kubenswrapper[4911]: I0606 10:58:02.182753 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-r6h6l" Jun 06 10:58:03 crc kubenswrapper[4911]: I0606 10:58:03.012380 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-r6h6l" event={"ID":"7dcafac0-090f-4f91-b6db-74c8b52f74c1","Type":"ContainerStarted","Data":"2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f"} Jun 06 10:58:03 crc kubenswrapper[4911]: I0606 10:58:03.012738 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-r6h6l" event={"ID":"7dcafac0-090f-4f91-b6db-74c8b52f74c1","Type":"ContainerStarted","Data":"326c64162154e1a1e1a2df8586e6254c83e6155bd0b7e69fdef50f63ce76f32b"} Jun 06 10:58:03 crc kubenswrapper[4911]: I0606 10:58:03.039435 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-r6h6l" podStartSLOduration=2.039397036 podStartE2EDuration="2.039397036s" podCreationTimestamp="2025-06-06 10:58:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:58:03.032706633 +0000 UTC m=+6294.308132196" watchObservedRunningTime="2025-06-06 10:58:03.039397036 +0000 UTC m=+6294.314822619" Jun 06 10:58:12 crc kubenswrapper[4911]: I0606 10:58:12.856929 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-r6h6l"] Jun 06 10:58:12 crc kubenswrapper[4911]: I0606 10:58:12.857943 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-r6h6l" podUID="7dcafac0-090f-4f91-b6db-74c8b52f74c1" containerName="container-00" containerID="cri-o://2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f" gracePeriod=2 Jun 06 10:58:12 crc kubenswrapper[4911]: I0606 10:58:12.868045 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-r6h6l"] Jun 06 10:58:12 crc kubenswrapper[4911]: I0606 10:58:12.968002 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-r6h6l" Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.020472 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7dcafac0-090f-4f91-b6db-74c8b52f74c1-host\") pod \"7dcafac0-090f-4f91-b6db-74c8b52f74c1\" (UID: \"7dcafac0-090f-4f91-b6db-74c8b52f74c1\") " Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.020554 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7dcafac0-090f-4f91-b6db-74c8b52f74c1-host" (OuterVolumeSpecName: "host") pod "7dcafac0-090f-4f91-b6db-74c8b52f74c1" (UID: "7dcafac0-090f-4f91-b6db-74c8b52f74c1"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.020751 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkz7m\" (UniqueName: \"kubernetes.io/projected/7dcafac0-090f-4f91-b6db-74c8b52f74c1-kube-api-access-tkz7m\") pod \"7dcafac0-090f-4f91-b6db-74c8b52f74c1\" (UID: \"7dcafac0-090f-4f91-b6db-74c8b52f74c1\") " Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.021509 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7dcafac0-090f-4f91-b6db-74c8b52f74c1-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.029575 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dcafac0-090f-4f91-b6db-74c8b52f74c1-kube-api-access-tkz7m" (OuterVolumeSpecName: "kube-api-access-tkz7m") pod "7dcafac0-090f-4f91-b6db-74c8b52f74c1" (UID: "7dcafac0-090f-4f91-b6db-74c8b52f74c1"). InnerVolumeSpecName "kube-api-access-tkz7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.112383 4911 generic.go:334] "Generic (PLEG): container finished" podID="7dcafac0-090f-4f91-b6db-74c8b52f74c1" containerID="2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f" exitCode=0 Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.112457 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-r6h6l" Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.112467 4911 scope.go:117] "RemoveContainer" containerID="2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f" Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.123583 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkz7m\" (UniqueName: \"kubernetes.io/projected/7dcafac0-090f-4f91-b6db-74c8b52f74c1-kube-api-access-tkz7m\") on node \"crc\" DevicePath \"\"" Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.145181 4911 scope.go:117] "RemoveContainer" containerID="2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f" Jun 06 10:58:13 crc kubenswrapper[4911]: E0606 10:58:13.145897 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f\": container with ID starting with 2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f not found: ID does not exist" containerID="2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f" Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.145974 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f"} err="failed to get container status \"2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f\": rpc error: code = NotFound desc = could not find container \"2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f\": container with ID starting with 2caa64e3595aa6fb4a9341611cd51ecc97a2119d73c83dbfb0f8eb8d5c871c0f not found: ID does not exist" Jun 06 10:58:13 crc kubenswrapper[4911]: I0606 10:58:13.960951 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dcafac0-090f-4f91-b6db-74c8b52f74c1" path="/var/lib/kubelet/pods/7dcafac0-090f-4f91-b6db-74c8b52f74c1/volumes" Jun 06 10:58:23 crc 
kubenswrapper[4911]: I0606 10:58:23.961667 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hf8v8"] Jun 06 10:58:23 crc kubenswrapper[4911]: E0606 10:58:23.962446 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dcafac0-090f-4f91-b6db-74c8b52f74c1" containerName="container-00" Jun 06 10:58:23 crc kubenswrapper[4911]: I0606 10:58:23.962460 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dcafac0-090f-4f91-b6db-74c8b52f74c1" containerName="container-00" Jun 06 10:58:23 crc kubenswrapper[4911]: I0606 10:58:23.962637 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dcafac0-090f-4f91-b6db-74c8b52f74c1" containerName="container-00" Jun 06 10:58:23 crc kubenswrapper[4911]: I0606 10:58:23.964066 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.017795 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hf8v8"] Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.116023 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bmc2\" (UniqueName: \"kubernetes.io/projected/f1551a7b-728b-4d83-890c-ecaaee23238b-kube-api-access-9bmc2\") pod \"community-operators-hf8v8\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.116230 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-utilities\") pod \"community-operators-hf8v8\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.116335 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-catalog-content\") pod \"community-operators-hf8v8\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.218603 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-utilities\") pod \"community-operators-hf8v8\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.219044 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-catalog-content\") pod \"community-operators-hf8v8\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.219225 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bmc2\" (UniqueName: \"kubernetes.io/projected/f1551a7b-728b-4d83-890c-ecaaee23238b-kube-api-access-9bmc2\") pod \"community-operators-hf8v8\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " pod="openshift-marketplace/community-operators-hf8v8" Jun 06 
10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.219452 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-utilities\") pod \"community-operators-hf8v8\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.219625 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-catalog-content\") pod \"community-operators-hf8v8\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.244261 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bmc2\" (UniqueName: \"kubernetes.io/projected/f1551a7b-728b-4d83-890c-ecaaee23238b-kube-api-access-9bmc2\") pod \"community-operators-hf8v8\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.300734 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.300813 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:58:24 crc kubenswrapper[4911]: I0606 10:58:24.327333 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:25 crc kubenswrapper[4911]: I0606 10:58:25.123028 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hf8v8"] Jun 06 10:58:25 crc kubenswrapper[4911]: I0606 10:58:25.255035 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hf8v8" event={"ID":"f1551a7b-728b-4d83-890c-ecaaee23238b","Type":"ContainerStarted","Data":"93e37124ef27499a7097b86d2aa49e775b90a43c92f57a4aed40a578629834f6"} Jun 06 10:58:26 crc kubenswrapper[4911]: I0606 10:58:26.285367 4911 generic.go:334] "Generic (PLEG): container finished" podID="f1551a7b-728b-4d83-890c-ecaaee23238b" containerID="6cb9df1d6e68c19fdbba9b8733815e36643a159128d0d2e0f21137c425d2c8f3" exitCode=0 Jun 06 10:58:26 crc kubenswrapper[4911]: I0606 10:58:26.285450 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hf8v8" event={"ID":"f1551a7b-728b-4d83-890c-ecaaee23238b","Type":"ContainerDied","Data":"6cb9df1d6e68c19fdbba9b8733815e36643a159128d0d2e0f21137c425d2c8f3"} Jun 06 10:58:26 crc kubenswrapper[4911]: I0606 10:58:26.288243 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 10:58:27 crc kubenswrapper[4911]: I0606 10:58:27.299292 4911 generic.go:334] "Generic (PLEG): container finished" podID="f1551a7b-728b-4d83-890c-ecaaee23238b" containerID="b2a2daaf8026c1866d28f0fd7f57ef26565f27d5664bfbb37185b47b91e316b5" exitCode=0 Jun 06 10:58:27 crc kubenswrapper[4911]: I0606 10:58:27.299424 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hf8v8" event={"ID":"f1551a7b-728b-4d83-890c-ecaaee23238b","Type":"ContainerDied","Data":"b2a2daaf8026c1866d28f0fd7f57ef26565f27d5664bfbb37185b47b91e316b5"} Jun 06 10:58:28 crc kubenswrapper[4911]: I0606 10:58:28.313615 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hf8v8" event={"ID":"f1551a7b-728b-4d83-890c-ecaaee23238b","Type":"ContainerStarted","Data":"1b83afc20bf56d36ec50731ba539124f4d3fe591233624624a9c43948f20e167"} Jun 06 10:58:28 crc kubenswrapper[4911]: I0606 10:58:28.335284 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hf8v8" podStartSLOduration=3.912545766 podStartE2EDuration="5.335257288s" podCreationTimestamp="2025-06-06 10:58:23 +0000 UTC" firstStartedPulling="2025-06-06 10:58:26.287996606 +0000 UTC m=+6317.563422149" lastFinishedPulling="2025-06-06 10:58:27.710708118 +0000 UTC m=+6318.986133671" observedRunningTime="2025-06-06 10:58:28.330164867 +0000 UTC m=+6319.605590410" watchObservedRunningTime="2025-06-06 10:58:28.335257288 +0000 UTC m=+6319.610682821" Jun 06 10:58:34 crc kubenswrapper[4911]: I0606 10:58:34.328325 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:34 crc kubenswrapper[4911]: I0606 10:58:34.329210 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:34 crc kubenswrapper[4911]: I0606 10:58:34.380760 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:34 crc kubenswrapper[4911]: I0606 10:58:34.443997 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:34 crc kubenswrapper[4911]: I0606 10:58:34.627896 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hf8v8"] Jun 06 10:58:36 crc kubenswrapper[4911]: I0606 10:58:36.396424 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hf8v8" podUID="f1551a7b-728b-4d83-890c-ecaaee23238b" containerName="registry-server" containerID="cri-o://1b83afc20bf56d36ec50731ba539124f4d3fe591233624624a9c43948f20e167" gracePeriod=2 Jun 06 10:58:37 crc kubenswrapper[4911]: I0606 10:58:37.412762 4911 generic.go:334] "Generic (PLEG): container finished" podID="f1551a7b-728b-4d83-890c-ecaaee23238b" containerID="1b83afc20bf56d36ec50731ba539124f4d3fe591233624624a9c43948f20e167" exitCode=0 Jun 06 10:58:37 crc kubenswrapper[4911]: I0606 10:58:37.413013 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hf8v8" event={"ID":"f1551a7b-728b-4d83-890c-ecaaee23238b","Type":"ContainerDied","Data":"1b83afc20bf56d36ec50731ba539124f4d3fe591233624624a9c43948f20e167"} Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.429186 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hf8v8" event={"ID":"f1551a7b-728b-4d83-890c-ecaaee23238b","Type":"ContainerDied","Data":"93e37124ef27499a7097b86d2aa49e775b90a43c92f57a4aed40a578629834f6"} Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.429586 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93e37124ef27499a7097b86d2aa49e775b90a43c92f57a4aed40a578629834f6" Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.465286 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.590013 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-utilities\") pod \"f1551a7b-728b-4d83-890c-ecaaee23238b\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.590323 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bmc2\" (UniqueName: \"kubernetes.io/projected/f1551a7b-728b-4d83-890c-ecaaee23238b-kube-api-access-9bmc2\") pod \"f1551a7b-728b-4d83-890c-ecaaee23238b\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.590414 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-catalog-content\") pod \"f1551a7b-728b-4d83-890c-ecaaee23238b\" (UID: \"f1551a7b-728b-4d83-890c-ecaaee23238b\") " Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.591225 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-utilities" (OuterVolumeSpecName: "utilities") pod "f1551a7b-728b-4d83-890c-ecaaee23238b" (UID: "f1551a7b-728b-4d83-890c-ecaaee23238b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.612586 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1551a7b-728b-4d83-890c-ecaaee23238b-kube-api-access-9bmc2" (OuterVolumeSpecName: "kube-api-access-9bmc2") pod "f1551a7b-728b-4d83-890c-ecaaee23238b" (UID: "f1551a7b-728b-4d83-890c-ecaaee23238b"). InnerVolumeSpecName "kube-api-access-9bmc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.633084 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1551a7b-728b-4d83-890c-ecaaee23238b" (UID: "f1551a7b-728b-4d83-890c-ecaaee23238b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.694489 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.694555 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bmc2\" (UniqueName: \"kubernetes.io/projected/f1551a7b-728b-4d83-890c-ecaaee23238b-kube-api-access-9bmc2\") on node \"crc\" DevicePath \"\"" Jun 06 10:58:38 crc kubenswrapper[4911]: I0606 10:58:38.694575 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1551a7b-728b-4d83-890c-ecaaee23238b-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 10:58:39 crc kubenswrapper[4911]: I0606 10:58:39.436823 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hf8v8" Jun 06 10:58:39 crc kubenswrapper[4911]: I0606 10:58:39.473281 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hf8v8"] Jun 06 10:58:39 crc kubenswrapper[4911]: I0606 10:58:39.486081 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hf8v8"] Jun 06 10:58:39 crc kubenswrapper[4911]: I0606 10:58:39.961190 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1551a7b-728b-4d83-890c-ecaaee23238b" path="/var/lib/kubelet/pods/f1551a7b-728b-4d83-890c-ecaaee23238b/volumes" Jun 06 10:58:54 crc kubenswrapper[4911]: I0606 10:58:54.300725 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:58:54 crc kubenswrapper[4911]: I0606 10:58:54.301340 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.257395 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-t2mj5"] Jun 06 10:59:02 crc kubenswrapper[4911]: E0606 10:59:02.258625 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1551a7b-728b-4d83-890c-ecaaee23238b" containerName="extract-utilities" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.258642 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1551a7b-728b-4d83-890c-ecaaee23238b" containerName="extract-utilities" Jun 06 10:59:02 crc kubenswrapper[4911]: E0606 10:59:02.258687 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1551a7b-728b-4d83-890c-ecaaee23238b" containerName="extract-content" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.258695 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1551a7b-728b-4d83-890c-ecaaee23238b" containerName="extract-content" Jun 06 10:59:02 crc kubenswrapper[4911]: E0606 10:59:02.258708 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1551a7b-728b-4d83-890c-ecaaee23238b" containerName="registry-server" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.258719 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1551a7b-728b-4d83-890c-ecaaee23238b" containerName="registry-server" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.258977 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1551a7b-728b-4d83-890c-ecaaee23238b" containerName="registry-server" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.260118 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-t2mj5" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.432557 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-host\") pod \"crc-debug-t2mj5\" (UID: \"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c\") " pod="openstack/crc-debug-t2mj5" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.432791 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-246vv\" (UniqueName: \"kubernetes.io/projected/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-kube-api-access-246vv\") pod \"crc-debug-t2mj5\" (UID: \"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c\") " pod="openstack/crc-debug-t2mj5" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.535347 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-host\") pod \"crc-debug-t2mj5\" (UID: \"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c\") " pod="openstack/crc-debug-t2mj5" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.535437 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-246vv\" (UniqueName: \"kubernetes.io/projected/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-kube-api-access-246vv\") pod \"crc-debug-t2mj5\" (UID: \"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c\") " pod="openstack/crc-debug-t2mj5" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.535546 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-host\") pod \"crc-debug-t2mj5\" (UID: \"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c\") " pod="openstack/crc-debug-t2mj5" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.560016 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-246vv\" (UniqueName: \"kubernetes.io/projected/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-kube-api-access-246vv\") pod \"crc-debug-t2mj5\" (UID: \"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c\") " pod="openstack/crc-debug-t2mj5" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.594007 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-t2mj5" Jun 06 10:59:02 crc kubenswrapper[4911]: I0606 10:59:02.669115 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-t2mj5" event={"ID":"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c","Type":"ContainerStarted","Data":"55037d95347f4435c0816f84d2cfc8899b89b411638f06cc5a74dadb55d1ca6a"} Jun 06 10:59:03 crc kubenswrapper[4911]: I0606 10:59:03.681926 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-t2mj5" event={"ID":"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c","Type":"ContainerStarted","Data":"ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4"} Jun 06 10:59:03 crc kubenswrapper[4911]: I0606 10:59:03.702578 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-t2mj5" podStartSLOduration=1.702553465 podStartE2EDuration="1.702553465s" podCreationTimestamp="2025-06-06 10:59:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 10:59:03.695257577 +0000 UTC m=+6354.970683130" watchObservedRunningTime="2025-06-06 10:59:03.702553465 +0000 UTC m=+6354.977979008" Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.277057 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-t2mj5"] Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.278022 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-t2mj5" podUID="bfcc5f23-cbea-4f0a-bf15-4de822f92c7c" containerName="container-00" containerID="cri-o://ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4" gracePeriod=2 Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.287392 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-t2mj5"] Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.392064 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-t2mj5" Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.534723 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-246vv\" (UniqueName: \"kubernetes.io/projected/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-kube-api-access-246vv\") pod \"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c\" (UID: \"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c\") " Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.535072 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-host\") pod \"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c\" (UID: \"bfcc5f23-cbea-4f0a-bf15-4de822f92c7c\") " Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.535189 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-host" (OuterVolumeSpecName: "host") pod "bfcc5f23-cbea-4f0a-bf15-4de822f92c7c" (UID: "bfcc5f23-cbea-4f0a-bf15-4de822f92c7c"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.535758 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-host\") on node \"crc\" DevicePath \"\"" Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.553969 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-kube-api-access-246vv" (OuterVolumeSpecName: "kube-api-access-246vv") pod "bfcc5f23-cbea-4f0a-bf15-4de822f92c7c" (UID: "bfcc5f23-cbea-4f0a-bf15-4de822f92c7c"). InnerVolumeSpecName "kube-api-access-246vv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.638396 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-246vv\" (UniqueName: \"kubernetes.io/projected/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c-kube-api-access-246vv\") on node \"crc\" DevicePath \"\"" Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.806622 4911 generic.go:334] "Generic (PLEG): container finished" podID="bfcc5f23-cbea-4f0a-bf15-4de822f92c7c" containerID="ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4" exitCode=0 Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.806798 4911 scope.go:117] "RemoveContainer" containerID="ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4" Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.806891 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-t2mj5" Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.834929 4911 scope.go:117] "RemoveContainer" containerID="ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4" Jun 06 10:59:13 crc kubenswrapper[4911]: E0606 10:59:13.835718 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4\": container with ID starting with ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4 not found: ID does not exist" containerID="ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4" Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.835772 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4"} err="failed to get container status \"ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4\": rpc error: code = NotFound desc = could not find container \"ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4\": container with ID starting with ee982f525e0b310e8886680328c4351f50980959eb78cb50e38f9287094025b4 not found: ID does not exist" Jun 06 10:59:13 crc kubenswrapper[4911]: I0606 10:59:13.958443 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfcc5f23-cbea-4f0a-bf15-4de822f92c7c" path="/var/lib/kubelet/pods/bfcc5f23-cbea-4f0a-bf15-4de822f92c7c/volumes" Jun 06 10:59:24 crc kubenswrapper[4911]: I0606 10:59:24.300055 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 10:59:24 crc kubenswrapper[4911]: I0606 
10:59:24.300635 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 10:59:24 crc kubenswrapper[4911]: I0606 10:59:24.300688 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 10:59:24 crc kubenswrapper[4911]: I0606 10:59:24.301495 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 10:59:24 crc kubenswrapper[4911]: I0606 10:59:24.301551 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" gracePeriod=600 Jun 06 10:59:24 crc kubenswrapper[4911]: E0606 10:59:24.429116 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:59:24 crc kubenswrapper[4911]: I0606 10:59:24.938383 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" exitCode=0 Jun 06 10:59:24 crc kubenswrapper[4911]: I0606 10:59:24.938454 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6"} Jun 06 10:59:24 crc kubenswrapper[4911]: I0606 10:59:24.938893 4911 scope.go:117] "RemoveContainer" containerID="e88856e5e2ab29572a14d0db7f394f9fe00a7119de7168db32b1ebc14063c4a9" Jun 06 10:59:24 crc kubenswrapper[4911]: I0606 10:59:24.940080 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 10:59:24 crc kubenswrapper[4911]: E0606 10:59:24.940630 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:59:38 crc kubenswrapper[4911]: I0606 10:59:38.947932 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 10:59:38 crc kubenswrapper[4911]: E0606 
10:59:38.948755 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 10:59:49 crc kubenswrapper[4911]: I0606 10:59:49.954772 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 10:59:49 crc kubenswrapper[4911]: E0606 10:59:49.955651 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.150273 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx"] Jun 06 11:00:00 crc kubenswrapper[4911]: E0606 11:00:00.151305 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfcc5f23-cbea-4f0a-bf15-4de822f92c7c" containerName="container-00" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.151320 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfcc5f23-cbea-4f0a-bf15-4de822f92c7c" containerName="container-00" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.151527 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfcc5f23-cbea-4f0a-bf15-4de822f92c7c" containerName="container-00" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.152284 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.155005 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.155762 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.160767 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx"] Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.247045 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/170162cb-4ce4-4fd9-8550-1eb64de6881b-config-volume\") pod \"collect-profiles-29153460-5bvtx\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.247145 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/170162cb-4ce4-4fd9-8550-1eb64de6881b-secret-volume\") pod \"collect-profiles-29153460-5bvtx\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.247210 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4xch\" (UniqueName: \"kubernetes.io/projected/170162cb-4ce4-4fd9-8550-1eb64de6881b-kube-api-access-q4xch\") pod \"collect-profiles-29153460-5bvtx\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.350260 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/170162cb-4ce4-4fd9-8550-1eb64de6881b-config-volume\") pod \"collect-profiles-29153460-5bvtx\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.350825 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/170162cb-4ce4-4fd9-8550-1eb64de6881b-secret-volume\") pod \"collect-profiles-29153460-5bvtx\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.351050 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4xch\" (UniqueName: \"kubernetes.io/projected/170162cb-4ce4-4fd9-8550-1eb64de6881b-kube-api-access-q4xch\") pod \"collect-profiles-29153460-5bvtx\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.351495 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/170162cb-4ce4-4fd9-8550-1eb64de6881b-config-volume\") pod 
\"collect-profiles-29153460-5bvtx\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.359841 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/170162cb-4ce4-4fd9-8550-1eb64de6881b-secret-volume\") pod \"collect-profiles-29153460-5bvtx\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.379378 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4xch\" (UniqueName: \"kubernetes.io/projected/170162cb-4ce4-4fd9-8550-1eb64de6881b-kube-api-access-q4xch\") pod \"collect-profiles-29153460-5bvtx\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:00 crc kubenswrapper[4911]: I0606 11:00:00.476670 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:01 crc kubenswrapper[4911]: I0606 11:00:01.237698 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx"] Jun 06 11:00:01 crc kubenswrapper[4911]: I0606 11:00:01.312481 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" event={"ID":"170162cb-4ce4-4fd9-8550-1eb64de6881b","Type":"ContainerStarted","Data":"48f9e784bf5020c4fd0d0ad988a18ced85f5f87020fa0c2736c2feb82d4769a3"} Jun 06 11:00:01 crc kubenswrapper[4911]: I0606 11:00:01.676703 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-8l74h"] Jun 06 11:00:01 crc kubenswrapper[4911]: I0606 11:00:01.678588 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-8l74h" Jun 06 11:00:01 crc kubenswrapper[4911]: I0606 11:00:01.813673 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d1d65d85-e26f-42df-af6f-1b9c8a773d28-host\") pod \"crc-debug-8l74h\" (UID: \"d1d65d85-e26f-42df-af6f-1b9c8a773d28\") " pod="openstack/crc-debug-8l74h" Jun 06 11:00:01 crc kubenswrapper[4911]: I0606 11:00:01.813936 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgqwm\" (UniqueName: \"kubernetes.io/projected/d1d65d85-e26f-42df-af6f-1b9c8a773d28-kube-api-access-dgqwm\") pod \"crc-debug-8l74h\" (UID: \"d1d65d85-e26f-42df-af6f-1b9c8a773d28\") " pod="openstack/crc-debug-8l74h" Jun 06 11:00:01 crc kubenswrapper[4911]: I0606 11:00:01.916134 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d1d65d85-e26f-42df-af6f-1b9c8a773d28-host\") pod \"crc-debug-8l74h\" (UID: \"d1d65d85-e26f-42df-af6f-1b9c8a773d28\") " pod="openstack/crc-debug-8l74h" Jun 06 11:00:01 crc kubenswrapper[4911]: I0606 11:00:01.916324 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgqwm\" (UniqueName: \"kubernetes.io/projected/d1d65d85-e26f-42df-af6f-1b9c8a773d28-kube-api-access-dgqwm\") pod \"crc-debug-8l74h\" (UID: \"d1d65d85-e26f-42df-af6f-1b9c8a773d28\") " pod="openstack/crc-debug-8l74h" Jun 06 11:00:01 crc kubenswrapper[4911]: I0606 11:00:01.916454 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d1d65d85-e26f-42df-af6f-1b9c8a773d28-host\") pod \"crc-debug-8l74h\" (UID: \"d1d65d85-e26f-42df-af6f-1b9c8a773d28\") " pod="openstack/crc-debug-8l74h" Jun 06 11:00:01 crc kubenswrapper[4911]: I0606 11:00:01.948282 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgqwm\" (UniqueName: \"kubernetes.io/projected/d1d65d85-e26f-42df-af6f-1b9c8a773d28-kube-api-access-dgqwm\") pod \"crc-debug-8l74h\" (UID: \"d1d65d85-e26f-42df-af6f-1b9c8a773d28\") " pod="openstack/crc-debug-8l74h" Jun 06 11:00:02 crc kubenswrapper[4911]: I0606 11:00:02.029926 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-8l74h" Jun 06 11:00:02 crc kubenswrapper[4911]: W0606 11:00:02.077305 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd1d65d85_e26f_42df_af6f_1b9c8a773d28.slice/crio-1e2e2a7b418c776e78929dae02fdb62ea7c893d7d4b117cd42d850bde325a16e WatchSource:0}: Error finding container 1e2e2a7b418c776e78929dae02fdb62ea7c893d7d4b117cd42d850bde325a16e: Status 404 returned error can't find the container with id 1e2e2a7b418c776e78929dae02fdb62ea7c893d7d4b117cd42d850bde325a16e Jun 06 11:00:02 crc kubenswrapper[4911]: I0606 11:00:02.325976 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-8l74h" event={"ID":"d1d65d85-e26f-42df-af6f-1b9c8a773d28","Type":"ContainerStarted","Data":"1e2e2a7b418c776e78929dae02fdb62ea7c893d7d4b117cd42d850bde325a16e"} Jun 06 11:00:02 crc kubenswrapper[4911]: I0606 11:00:02.331469 4911 generic.go:334] "Generic (PLEG): container finished" podID="170162cb-4ce4-4fd9-8550-1eb64de6881b" containerID="33d48214e319ddc9cc447f8f49408460eafdc495e6b6e1008dd385ab3a641c2f" exitCode=0 Jun 06 11:00:02 crc kubenswrapper[4911]: I0606 11:00:02.331519 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" event={"ID":"170162cb-4ce4-4fd9-8550-1eb64de6881b","Type":"ContainerDied","Data":"33d48214e319ddc9cc447f8f49408460eafdc495e6b6e1008dd385ab3a641c2f"} Jun 06 11:00:02 crc kubenswrapper[4911]: I0606 11:00:02.948975 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:00:02 crc kubenswrapper[4911]: E0606 11:00:02.949339 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:00:03 crc kubenswrapper[4911]: I0606 11:00:03.374121 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-8l74h" event={"ID":"d1d65d85-e26f-42df-af6f-1b9c8a773d28","Type":"ContainerStarted","Data":"723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b"} Jun 06 11:00:03 crc kubenswrapper[4911]: I0606 11:00:03.393285 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-8l74h" podStartSLOduration=2.393233676 podStartE2EDuration="2.393233676s" podCreationTimestamp="2025-06-06 11:00:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:00:03.39225063 +0000 UTC m=+6414.667676193" watchObservedRunningTime="2025-06-06 11:00:03.393233676 +0000 UTC m=+6414.668659229" Jun 06 11:00:04 crc kubenswrapper[4911]: I0606 11:00:04.638339 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:04 crc kubenswrapper[4911]: I0606 11:00:04.795646 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/170162cb-4ce4-4fd9-8550-1eb64de6881b-config-volume\") pod \"170162cb-4ce4-4fd9-8550-1eb64de6881b\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " Jun 06 11:00:04 crc kubenswrapper[4911]: I0606 11:00:04.796500 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/170162cb-4ce4-4fd9-8550-1eb64de6881b-secret-volume\") pod \"170162cb-4ce4-4fd9-8550-1eb64de6881b\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " Jun 06 11:00:04 crc kubenswrapper[4911]: I0606 11:00:04.796704 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4xch\" (UniqueName: \"kubernetes.io/projected/170162cb-4ce4-4fd9-8550-1eb64de6881b-kube-api-access-q4xch\") pod \"170162cb-4ce4-4fd9-8550-1eb64de6881b\" (UID: \"170162cb-4ce4-4fd9-8550-1eb64de6881b\") " Jun 06 11:00:04 crc kubenswrapper[4911]: I0606 11:00:04.796992 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/170162cb-4ce4-4fd9-8550-1eb64de6881b-config-volume" (OuterVolumeSpecName: "config-volume") pod "170162cb-4ce4-4fd9-8550-1eb64de6881b" (UID: "170162cb-4ce4-4fd9-8550-1eb64de6881b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jun 06 11:00:04 crc kubenswrapper[4911]: I0606 11:00:04.797434 4911 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/170162cb-4ce4-4fd9-8550-1eb64de6881b-config-volume\") on node \"crc\" DevicePath \"\"" Jun 06 11:00:04 crc kubenswrapper[4911]: I0606 11:00:04.805836 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/170162cb-4ce4-4fd9-8550-1eb64de6881b-kube-api-access-q4xch" (OuterVolumeSpecName: "kube-api-access-q4xch") pod "170162cb-4ce4-4fd9-8550-1eb64de6881b" (UID: "170162cb-4ce4-4fd9-8550-1eb64de6881b"). InnerVolumeSpecName "kube-api-access-q4xch". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:00:04 crc kubenswrapper[4911]: I0606 11:00:04.806402 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/170162cb-4ce4-4fd9-8550-1eb64de6881b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "170162cb-4ce4-4fd9-8550-1eb64de6881b" (UID: "170162cb-4ce4-4fd9-8550-1eb64de6881b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 11:00:04 crc kubenswrapper[4911]: I0606 11:00:04.899811 4911 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/170162cb-4ce4-4fd9-8550-1eb64de6881b-secret-volume\") on node \"crc\" DevicePath \"\"" Jun 06 11:00:04 crc kubenswrapper[4911]: I0606 11:00:04.899856 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4xch\" (UniqueName: \"kubernetes.io/projected/170162cb-4ce4-4fd9-8550-1eb64de6881b-kube-api-access-q4xch\") on node \"crc\" DevicePath \"\"" Jun 06 11:00:05 crc kubenswrapper[4911]: I0606 11:00:05.396068 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" event={"ID":"170162cb-4ce4-4fd9-8550-1eb64de6881b","Type":"ContainerDied","Data":"48f9e784bf5020c4fd0d0ad988a18ced85f5f87020fa0c2736c2feb82d4769a3"} Jun 06 11:00:05 crc kubenswrapper[4911]: I0606 11:00:05.396496 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48f9e784bf5020c4fd0d0ad988a18ced85f5f87020fa0c2736c2feb82d4769a3" Jun 06 11:00:05 crc kubenswrapper[4911]: I0606 11:00:05.396255 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29153460-5bvtx" Jun 06 11:00:05 crc kubenswrapper[4911]: I0606 11:00:05.720282 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75"] Jun 06 11:00:05 crc kubenswrapper[4911]: I0606 11:00:05.731564 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29153415-fbt75"] Jun 06 11:00:05 crc kubenswrapper[4911]: I0606 11:00:05.961842 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec3b4e61-2f4a-4261-afd2-3f985059ad60" path="/var/lib/kubelet/pods/ec3b4e61-2f4a-4261-afd2-3f985059ad60/volumes" Jun 06 11:00:12 crc kubenswrapper[4911]: I0606 11:00:12.767248 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-8l74h"] Jun 06 11:00:12 crc kubenswrapper[4911]: I0606 11:00:12.768656 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-8l74h" podUID="d1d65d85-e26f-42df-af6f-1b9c8a773d28" containerName="container-00" containerID="cri-o://723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b" gracePeriod=2 Jun 06 11:00:12 crc kubenswrapper[4911]: I0606 11:00:12.780403 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-8l74h"] Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.012585 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-8l74h" Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.105117 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgqwm\" (UniqueName: \"kubernetes.io/projected/d1d65d85-e26f-42df-af6f-1b9c8a773d28-kube-api-access-dgqwm\") pod \"d1d65d85-e26f-42df-af6f-1b9c8a773d28\" (UID: \"d1d65d85-e26f-42df-af6f-1b9c8a773d28\") " Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.105323 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d1d65d85-e26f-42df-af6f-1b9c8a773d28-host\") pod \"d1d65d85-e26f-42df-af6f-1b9c8a773d28\" (UID: \"d1d65d85-e26f-42df-af6f-1b9c8a773d28\") " Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.105424 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d1d65d85-e26f-42df-af6f-1b9c8a773d28-host" (OuterVolumeSpecName: "host") pod "d1d65d85-e26f-42df-af6f-1b9c8a773d28" (UID: "d1d65d85-e26f-42df-af6f-1b9c8a773d28"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.106127 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d1d65d85-e26f-42df-af6f-1b9c8a773d28-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.112725 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1d65d85-e26f-42df-af6f-1b9c8a773d28-kube-api-access-dgqwm" (OuterVolumeSpecName: "kube-api-access-dgqwm") pod "d1d65d85-e26f-42df-af6f-1b9c8a773d28" (UID: "d1d65d85-e26f-42df-af6f-1b9c8a773d28"). InnerVolumeSpecName "kube-api-access-dgqwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.208268 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgqwm\" (UniqueName: \"kubernetes.io/projected/d1d65d85-e26f-42df-af6f-1b9c8a773d28-kube-api-access-dgqwm\") on node \"crc\" DevicePath \"\"" Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.472901 4911 generic.go:334] "Generic (PLEG): container finished" podID="d1d65d85-e26f-42df-af6f-1b9c8a773d28" containerID="723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b" exitCode=0 Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.472932 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-8l74h" Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.472966 4911 scope.go:117] "RemoveContainer" containerID="723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b" Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.509637 4911 scope.go:117] "RemoveContainer" containerID="723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b" Jun 06 11:00:13 crc kubenswrapper[4911]: E0606 11:00:13.510222 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b\": container with ID starting with 723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b not found: ID does not exist" containerID="723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b" Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.510261 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b"} err="failed to get container status \"723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b\": rpc error: code = NotFound desc = could not find container \"723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b\": container with ID starting with 723823db59dd0b1e255459913088ff55bf2fb8555082ea787bcfc1a73b0e660b not found: ID does not exist" Jun 06 11:00:13 crc kubenswrapper[4911]: I0606 11:00:13.961051 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1d65d85-e26f-42df-af6f-1b9c8a773d28" path="/var/lib/kubelet/pods/d1d65d85-e26f-42df-af6f-1b9c8a773d28/volumes" Jun 06 11:00:15 crc kubenswrapper[4911]: I0606 11:00:15.948521 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:00:15 crc kubenswrapper[4911]: E0606 11:00:15.949649 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:00:24 crc kubenswrapper[4911]: I0606 11:00:24.903474 4911 scope.go:117] "RemoveContainer" containerID="946c71feb9751b5fbcb3ab17c19f17388fd24479f8ae5e948c4546cd907ac73a" Jun 06 11:00:26 crc kubenswrapper[4911]: I0606 11:00:26.948871 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:00:26 crc kubenswrapper[4911]: E0606 11:00:26.949697 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:00:39 crc kubenswrapper[4911]: I0606 11:00:39.958585 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:00:39 crc kubenswrapper[4911]: E0606 11:00:39.959429 4911 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:00:52 crc kubenswrapper[4911]: I0606 11:00:52.948556 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:00:52 crc kubenswrapper[4911]: E0606 11:00:52.949696 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.160622 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29153461-hwpvm"] Jun 06 11:01:00 crc kubenswrapper[4911]: E0606 11:01:00.161822 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="170162cb-4ce4-4fd9-8550-1eb64de6881b" containerName="collect-profiles" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.161835 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="170162cb-4ce4-4fd9-8550-1eb64de6881b" containerName="collect-profiles" Jun 06 11:01:00 crc kubenswrapper[4911]: E0606 11:01:00.161850 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1d65d85-e26f-42df-af6f-1b9c8a773d28" containerName="container-00" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.161856 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1d65d85-e26f-42df-af6f-1b9c8a773d28" containerName="container-00" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.162079 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="170162cb-4ce4-4fd9-8550-1eb64de6881b" containerName="collect-profiles" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.162115 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1d65d85-e26f-42df-af6f-1b9c8a773d28" containerName="container-00" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.162825 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.174615 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29153461-hwpvm"] Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.228497 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-combined-ca-bundle\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.228554 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlrg6\" (UniqueName: \"kubernetes.io/projected/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-kube-api-access-xlrg6\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.228587 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-fernet-keys\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.228942 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-config-data\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.331212 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-combined-ca-bundle\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.331274 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlrg6\" (UniqueName: \"kubernetes.io/projected/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-kube-api-access-xlrg6\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.331330 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-fernet-keys\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.331569 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-config-data\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.339609 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-config-data\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.339666 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-fernet-keys\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.343883 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-combined-ca-bundle\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.351712 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlrg6\" (UniqueName: \"kubernetes.io/projected/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-kube-api-access-xlrg6\") pod \"keystone-cron-29153461-hwpvm\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:00 crc kubenswrapper[4911]: I0606 11:01:00.490667 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:01 crc kubenswrapper[4911]: I0606 11:01:01.159787 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29153461-hwpvm"] Jun 06 11:01:01 crc kubenswrapper[4911]: I0606 11:01:01.986777 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29153461-hwpvm" event={"ID":"f1ee55fa-6ee5-4590-9f82-3421b3b803fe","Type":"ContainerStarted","Data":"f61013a3f173331b5281680714bf9d1687d9f414f2eadf89dfb1c353a1e0725a"} Jun 06 11:01:01 crc kubenswrapper[4911]: I0606 11:01:01.987241 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29153461-hwpvm" event={"ID":"f1ee55fa-6ee5-4590-9f82-3421b3b803fe","Type":"ContainerStarted","Data":"99668e778418a3a6a286828f1e571c08562b41c570ff6cba0bc6cec4f12b4041"} Jun 06 11:01:02 crc kubenswrapper[4911]: I0606 11:01:02.013746 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29153461-hwpvm" podStartSLOduration=2.013726931 podStartE2EDuration="2.013726931s" podCreationTimestamp="2025-06-06 11:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:01:02.005411906 +0000 UTC m=+6473.280837499" watchObservedRunningTime="2025-06-06 11:01:02.013726931 +0000 UTC m=+6473.289152474" Jun 06 11:01:02 crc kubenswrapper[4911]: I0606 11:01:02.218757 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-lscnf"] Jun 06 11:01:02 crc kubenswrapper[4911]: I0606 11:01:02.220375 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-lscnf" Jun 06 11:01:02 crc kubenswrapper[4911]: I0606 11:01:02.276009 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f88e7fef-0467-4567-936d-222ad9cd718d-host\") pod \"crc-debug-lscnf\" (UID: \"f88e7fef-0467-4567-936d-222ad9cd718d\") " pod="openstack/crc-debug-lscnf" Jun 06 11:01:02 crc kubenswrapper[4911]: I0606 11:01:02.276184 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n8hd\" (UniqueName: \"kubernetes.io/projected/f88e7fef-0467-4567-936d-222ad9cd718d-kube-api-access-6n8hd\") pod \"crc-debug-lscnf\" (UID: \"f88e7fef-0467-4567-936d-222ad9cd718d\") " pod="openstack/crc-debug-lscnf" Jun 06 11:01:02 crc kubenswrapper[4911]: I0606 11:01:02.379488 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f88e7fef-0467-4567-936d-222ad9cd718d-host\") pod \"crc-debug-lscnf\" (UID: \"f88e7fef-0467-4567-936d-222ad9cd718d\") " pod="openstack/crc-debug-lscnf" Jun 06 11:01:02 crc kubenswrapper[4911]: I0606 11:01:02.379653 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f88e7fef-0467-4567-936d-222ad9cd718d-host\") pod \"crc-debug-lscnf\" (UID: \"f88e7fef-0467-4567-936d-222ad9cd718d\") " pod="openstack/crc-debug-lscnf" Jun 06 11:01:02 crc kubenswrapper[4911]: I0606 11:01:02.379664 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n8hd\" (UniqueName: \"kubernetes.io/projected/f88e7fef-0467-4567-936d-222ad9cd718d-kube-api-access-6n8hd\") pod \"crc-debug-lscnf\" (UID: \"f88e7fef-0467-4567-936d-222ad9cd718d\") " pod="openstack/crc-debug-lscnf" Jun 06 11:01:02 crc kubenswrapper[4911]: I0606 11:01:02.422469 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n8hd\" (UniqueName: \"kubernetes.io/projected/f88e7fef-0467-4567-936d-222ad9cd718d-kube-api-access-6n8hd\") pod \"crc-debug-lscnf\" (UID: \"f88e7fef-0467-4567-936d-222ad9cd718d\") " pod="openstack/crc-debug-lscnf" Jun 06 11:01:02 crc kubenswrapper[4911]: I0606 11:01:02.543771 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-lscnf" Jun 06 11:01:03 crc kubenswrapper[4911]: I0606 11:01:03.003377 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-lscnf" event={"ID":"f88e7fef-0467-4567-936d-222ad9cd718d","Type":"ContainerStarted","Data":"fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf"} Jun 06 11:01:03 crc kubenswrapper[4911]: I0606 11:01:03.003856 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-lscnf" event={"ID":"f88e7fef-0467-4567-936d-222ad9cd718d","Type":"ContainerStarted","Data":"32d2e3072cf09b3a263bde33fbade761be2a6438326c9ff487b02621402188f7"} Jun 06 11:01:03 crc kubenswrapper[4911]: I0606 11:01:03.036611 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-lscnf" podStartSLOduration=1.036585712 podStartE2EDuration="1.036585712s" podCreationTimestamp="2025-06-06 11:01:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:01:03.019230424 +0000 UTC m=+6474.294655977" watchObservedRunningTime="2025-06-06 11:01:03.036585712 +0000 UTC m=+6474.312011275" Jun 06 11:01:04 crc kubenswrapper[4911]: I0606 11:01:04.015197 4911 generic.go:334] "Generic (PLEG): container finished" podID="f1ee55fa-6ee5-4590-9f82-3421b3b803fe" containerID="f61013a3f173331b5281680714bf9d1687d9f414f2eadf89dfb1c353a1e0725a" exitCode=0 Jun 06 11:01:04 crc kubenswrapper[4911]: I0606 11:01:04.015272 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29153461-hwpvm" event={"ID":"f1ee55fa-6ee5-4590-9f82-3421b3b803fe","Type":"ContainerDied","Data":"f61013a3f173331b5281680714bf9d1687d9f414f2eadf89dfb1c353a1e0725a"} Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.418682 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.583492 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-combined-ca-bundle\") pod \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.583634 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-config-data\") pod \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.584564 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-fernet-keys\") pod \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.584797 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xlrg6\" (UniqueName: \"kubernetes.io/projected/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-kube-api-access-xlrg6\") pod \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\" (UID: \"f1ee55fa-6ee5-4590-9f82-3421b3b803fe\") " Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.591516 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f1ee55fa-6ee5-4590-9f82-3421b3b803fe" (UID: "f1ee55fa-6ee5-4590-9f82-3421b3b803fe"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.592212 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-kube-api-access-xlrg6" (OuterVolumeSpecName: "kube-api-access-xlrg6") pod "f1ee55fa-6ee5-4590-9f82-3421b3b803fe" (UID: "f1ee55fa-6ee5-4590-9f82-3421b3b803fe"). InnerVolumeSpecName "kube-api-access-xlrg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.619845 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1ee55fa-6ee5-4590-9f82-3421b3b803fe" (UID: "f1ee55fa-6ee5-4590-9f82-3421b3b803fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.642730 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-config-data" (OuterVolumeSpecName: "config-data") pod "f1ee55fa-6ee5-4590-9f82-3421b3b803fe" (UID: "f1ee55fa-6ee5-4590-9f82-3421b3b803fe"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.687065 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xlrg6\" (UniqueName: \"kubernetes.io/projected/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-kube-api-access-xlrg6\") on node \"crc\" DevicePath \"\"" Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.687139 4911 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.687153 4911 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-config-data\") on node \"crc\" DevicePath \"\"" Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.687167 4911 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f1ee55fa-6ee5-4590-9f82-3421b3b803fe-fernet-keys\") on node \"crc\" DevicePath \"\"" Jun 06 11:01:06 crc kubenswrapper[4911]: I0606 11:01:06.947992 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:01:06 crc kubenswrapper[4911]: E0606 11:01:06.948436 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:01:07 crc kubenswrapper[4911]: I0606 11:01:07.043077 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29153461-hwpvm" event={"ID":"f1ee55fa-6ee5-4590-9f82-3421b3b803fe","Type":"ContainerDied","Data":"99668e778418a3a6a286828f1e571c08562b41c570ff6cba0bc6cec4f12b4041"} Jun 06 11:01:07 crc kubenswrapper[4911]: I0606 11:01:07.043132 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99668e778418a3a6a286828f1e571c08562b41c570ff6cba0bc6cec4f12b4041" Jun 06 11:01:07 crc kubenswrapper[4911]: I0606 11:01:07.043181 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29153461-hwpvm" Jun 06 11:01:13 crc kubenswrapper[4911]: I0606 11:01:13.187608 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-lscnf"] Jun 06 11:01:13 crc kubenswrapper[4911]: I0606 11:01:13.189041 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-lscnf" podUID="f88e7fef-0467-4567-936d-222ad9cd718d" containerName="container-00" containerID="cri-o://fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf" gracePeriod=2 Jun 06 11:01:13 crc kubenswrapper[4911]: I0606 11:01:13.195513 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-lscnf"] Jun 06 11:01:13 crc kubenswrapper[4911]: I0606 11:01:13.294279 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-lscnf" Jun 06 11:01:13 crc kubenswrapper[4911]: I0606 11:01:13.424349 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f88e7fef-0467-4567-936d-222ad9cd718d-host\") pod \"f88e7fef-0467-4567-936d-222ad9cd718d\" (UID: \"f88e7fef-0467-4567-936d-222ad9cd718d\") " Jun 06 11:01:13 crc kubenswrapper[4911]: I0606 11:01:13.424487 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6n8hd\" (UniqueName: \"kubernetes.io/projected/f88e7fef-0467-4567-936d-222ad9cd718d-kube-api-access-6n8hd\") pod \"f88e7fef-0467-4567-936d-222ad9cd718d\" (UID: \"f88e7fef-0467-4567-936d-222ad9cd718d\") " Jun 06 11:01:13 crc kubenswrapper[4911]: I0606 11:01:13.424650 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f88e7fef-0467-4567-936d-222ad9cd718d-host" (OuterVolumeSpecName: "host") pod "f88e7fef-0467-4567-936d-222ad9cd718d" (UID: "f88e7fef-0467-4567-936d-222ad9cd718d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:01:13 crc kubenswrapper[4911]: I0606 11:01:13.424964 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f88e7fef-0467-4567-936d-222ad9cd718d-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:01:13 crc kubenswrapper[4911]: I0606 11:01:13.437916 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88e7fef-0467-4567-936d-222ad9cd718d-kube-api-access-6n8hd" (OuterVolumeSpecName: "kube-api-access-6n8hd") pod "f88e7fef-0467-4567-936d-222ad9cd718d" (UID: "f88e7fef-0467-4567-936d-222ad9cd718d"). InnerVolumeSpecName "kube-api-access-6n8hd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:01:13 crc kubenswrapper[4911]: I0606 11:01:13.527796 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6n8hd\" (UniqueName: \"kubernetes.io/projected/f88e7fef-0467-4567-936d-222ad9cd718d-kube-api-access-6n8hd\") on node \"crc\" DevicePath \"\"" Jun 06 11:01:13 crc kubenswrapper[4911]: I0606 11:01:13.961333 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88e7fef-0467-4567-936d-222ad9cd718d" path="/var/lib/kubelet/pods/f88e7fef-0467-4567-936d-222ad9cd718d/volumes" Jun 06 11:01:14 crc kubenswrapper[4911]: I0606 11:01:14.116931 4911 generic.go:334] "Generic (PLEG): container finished" podID="f88e7fef-0467-4567-936d-222ad9cd718d" containerID="fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf" exitCode=0 Jun 06 11:01:14 crc kubenswrapper[4911]: I0606 11:01:14.117012 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-lscnf" Jun 06 11:01:14 crc kubenswrapper[4911]: I0606 11:01:14.117038 4911 scope.go:117] "RemoveContainer" containerID="fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf" Jun 06 11:01:14 crc kubenswrapper[4911]: I0606 11:01:14.140642 4911 scope.go:117] "RemoveContainer" containerID="fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf" Jun 06 11:01:14 crc kubenswrapper[4911]: E0606 11:01:14.141227 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf\": container with ID starting with fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf not found: ID does not exist" containerID="fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf" Jun 06 11:01:14 crc kubenswrapper[4911]: I0606 11:01:14.141446 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf"} err="failed to get container status \"fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf\": rpc error: code = NotFound desc = could not find container \"fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf\": container with ID starting with fd9f76f9a6d666255cdeb2a5cc3a1c1b78130913f03e3222893411c3181acfcf not found: ID does not exist" Jun 06 11:01:14 crc kubenswrapper[4911]: E0606 11:01:14.188616 4911 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf88e7fef_0467_4567_936d_222ad9cd718d.slice/crio-32d2e3072cf09b3a263bde33fbade761be2a6438326c9ff487b02621402188f7\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf88e7fef_0467_4567_936d_222ad9cd718d.slice\": RecentStats: unable to find data in memory cache]" Jun 06 11:01:17 crc kubenswrapper[4911]: I0606 11:01:17.949032 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:01:17 crc kubenswrapper[4911]: E0606 11:01:17.950329 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:01:31 crc kubenswrapper[4911]: I0606 11:01:31.948894 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:01:31 crc kubenswrapper[4911]: E0606 11:01:31.950115 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:01:43 crc kubenswrapper[4911]: I0606 11:01:43.948622 4911 scope.go:117] "RemoveContainer" 
containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:01:43 crc kubenswrapper[4911]: E0606 11:01:43.949470 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:01:54 crc kubenswrapper[4911]: I0606 11:01:54.948359 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:01:54 crc kubenswrapper[4911]: E0606 11:01:54.949273 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.605572 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-tvt6v"] Jun 06 11:02:01 crc kubenswrapper[4911]: E0606 11:02:01.607421 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1ee55fa-6ee5-4590-9f82-3421b3b803fe" containerName="keystone-cron" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.607445 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1ee55fa-6ee5-4590-9f82-3421b3b803fe" containerName="keystone-cron" Jun 06 11:02:01 crc kubenswrapper[4911]: E0606 11:02:01.607483 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f88e7fef-0467-4567-936d-222ad9cd718d" containerName="container-00" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.607521 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="f88e7fef-0467-4567-936d-222ad9cd718d" containerName="container-00" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.608066 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f88e7fef-0467-4567-936d-222ad9cd718d" containerName="container-00" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.608102 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1ee55fa-6ee5-4590-9f82-3421b3b803fe" containerName="keystone-cron" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.609249 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-tvt6v" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.659573 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqqhk\" (UniqueName: \"kubernetes.io/projected/29c2d8fc-8858-4900-b37b-812f80e5fa7d-kube-api-access-mqqhk\") pod \"crc-debug-tvt6v\" (UID: \"29c2d8fc-8858-4900-b37b-812f80e5fa7d\") " pod="openstack/crc-debug-tvt6v" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.659734 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29c2d8fc-8858-4900-b37b-812f80e5fa7d-host\") pod \"crc-debug-tvt6v\" (UID: \"29c2d8fc-8858-4900-b37b-812f80e5fa7d\") " pod="openstack/crc-debug-tvt6v" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.761790 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29c2d8fc-8858-4900-b37b-812f80e5fa7d-host\") pod \"crc-debug-tvt6v\" (UID: \"29c2d8fc-8858-4900-b37b-812f80e5fa7d\") " pod="openstack/crc-debug-tvt6v" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.761975 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29c2d8fc-8858-4900-b37b-812f80e5fa7d-host\") pod \"crc-debug-tvt6v\" (UID: \"29c2d8fc-8858-4900-b37b-812f80e5fa7d\") " pod="openstack/crc-debug-tvt6v" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.761995 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqqhk\" (UniqueName: \"kubernetes.io/projected/29c2d8fc-8858-4900-b37b-812f80e5fa7d-kube-api-access-mqqhk\") pod \"crc-debug-tvt6v\" (UID: \"29c2d8fc-8858-4900-b37b-812f80e5fa7d\") " pod="openstack/crc-debug-tvt6v" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.793653 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqqhk\" (UniqueName: \"kubernetes.io/projected/29c2d8fc-8858-4900-b37b-812f80e5fa7d-kube-api-access-mqqhk\") pod \"crc-debug-tvt6v\" (UID: \"29c2d8fc-8858-4900-b37b-812f80e5fa7d\") " pod="openstack/crc-debug-tvt6v" Jun 06 11:02:01 crc kubenswrapper[4911]: I0606 11:02:01.930514 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-tvt6v" Jun 06 11:02:02 crc kubenswrapper[4911]: I0606 11:02:02.650848 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-tvt6v" event={"ID":"29c2d8fc-8858-4900-b37b-812f80e5fa7d","Type":"ContainerStarted","Data":"6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3"} Jun 06 11:02:02 crc kubenswrapper[4911]: I0606 11:02:02.651923 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-tvt6v" event={"ID":"29c2d8fc-8858-4900-b37b-812f80e5fa7d","Type":"ContainerStarted","Data":"71f341f85d5dd916e417d12361cd9bb6ac2e34e73dbe3619aa68c5ace7875aa8"} Jun 06 11:02:02 crc kubenswrapper[4911]: I0606 11:02:02.674803 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-tvt6v" podStartSLOduration=1.674782924 podStartE2EDuration="1.674782924s" podCreationTimestamp="2025-06-06 11:02:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:02:02.665357971 +0000 UTC m=+6533.940783514" watchObservedRunningTime="2025-06-06 11:02:02.674782924 +0000 UTC m=+6533.950208467" Jun 06 11:02:06 crc kubenswrapper[4911]: I0606 11:02:06.948566 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:02:06 crc kubenswrapper[4911]: E0606 11:02:06.949566 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.617239 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-tvt6v"] Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.618447 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-tvt6v" podUID="29c2d8fc-8858-4900-b37b-812f80e5fa7d" containerName="container-00" containerID="cri-o://6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3" gracePeriod=2 Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.625215 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-tvt6v"] Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.737154 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-tvt6v" Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.755193 4911 generic.go:334] "Generic (PLEG): container finished" podID="29c2d8fc-8858-4900-b37b-812f80e5fa7d" containerID="6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3" exitCode=0 Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.755248 4911 scope.go:117] "RemoveContainer" containerID="6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3" Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.755286 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-tvt6v" Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.781936 4911 scope.go:117] "RemoveContainer" containerID="6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3" Jun 06 11:02:12 crc kubenswrapper[4911]: E0606 11:02:12.783266 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3\": container with ID starting with 6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3 not found: ID does not exist" containerID="6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3" Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.783362 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3"} err="failed to get container status \"6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3\": rpc error: code = NotFound desc = could not find container \"6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3\": container with ID starting with 6fae3eb493ca84a0c96575b33f0131f7aa360ffd8d7424578c10c60b0937e3f3 not found: ID does not exist" Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.808614 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqqhk\" (UniqueName: \"kubernetes.io/projected/29c2d8fc-8858-4900-b37b-812f80e5fa7d-kube-api-access-mqqhk\") pod \"29c2d8fc-8858-4900-b37b-812f80e5fa7d\" (UID: \"29c2d8fc-8858-4900-b37b-812f80e5fa7d\") " Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.808753 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29c2d8fc-8858-4900-b37b-812f80e5fa7d-host\") pod \"29c2d8fc-8858-4900-b37b-812f80e5fa7d\" (UID: \"29c2d8fc-8858-4900-b37b-812f80e5fa7d\") " Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.808932 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29c2d8fc-8858-4900-b37b-812f80e5fa7d-host" (OuterVolumeSpecName: "host") pod "29c2d8fc-8858-4900-b37b-812f80e5fa7d" (UID: "29c2d8fc-8858-4900-b37b-812f80e5fa7d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.809399 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29c2d8fc-8858-4900-b37b-812f80e5fa7d-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.815991 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29c2d8fc-8858-4900-b37b-812f80e5fa7d-kube-api-access-mqqhk" (OuterVolumeSpecName: "kube-api-access-mqqhk") pod "29c2d8fc-8858-4900-b37b-812f80e5fa7d" (UID: "29c2d8fc-8858-4900-b37b-812f80e5fa7d"). InnerVolumeSpecName "kube-api-access-mqqhk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:02:12 crc kubenswrapper[4911]: I0606 11:02:12.912537 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqqhk\" (UniqueName: \"kubernetes.io/projected/29c2d8fc-8858-4900-b37b-812f80e5fa7d-kube-api-access-mqqhk\") on node \"crc\" DevicePath \"\"" Jun 06 11:02:13 crc kubenswrapper[4911]: I0606 11:02:13.959475 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29c2d8fc-8858-4900-b37b-812f80e5fa7d" path="/var/lib/kubelet/pods/29c2d8fc-8858-4900-b37b-812f80e5fa7d/volumes" Jun 06 11:02:21 crc kubenswrapper[4911]: I0606 11:02:21.951579 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:02:21 crc kubenswrapper[4911]: E0606 11:02:21.952912 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:02:25 crc kubenswrapper[4911]: I0606 11:02:25.034778 4911 scope.go:117] "RemoveContainer" containerID="6ebba9e7c12b151a4fe5d8d737e9cecf025b739f2823198c2d48b3bc52b6b77a" Jun 06 11:02:35 crc kubenswrapper[4911]: I0606 11:02:35.947810 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:02:35 crc kubenswrapper[4911]: E0606 11:02:35.950141 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.584158 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zrwfs/must-gather-nljpf"] Jun 06 11:02:38 crc kubenswrapper[4911]: E0606 11:02:38.585673 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c2d8fc-8858-4900-b37b-812f80e5fa7d" containerName="container-00" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.585693 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c2d8fc-8858-4900-b37b-812f80e5fa7d" containerName="container-00" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.585990 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="29c2d8fc-8858-4900-b37b-812f80e5fa7d" containerName="container-00" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.587332 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zrwfs/must-gather-nljpf" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.590210 4911 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-zrwfs"/"default-dockercfg-5bp2v" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.590867 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zrwfs"/"kube-root-ca.crt" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.591162 4911 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zrwfs"/"openshift-service-ca.crt" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.612821 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zrwfs/must-gather-nljpf"] Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.647991 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a572218a-ecfd-45ec-8d89-0489cb95a11b-must-gather-output\") pod \"must-gather-nljpf\" (UID: \"a572218a-ecfd-45ec-8d89-0489cb95a11b\") " pod="openshift-must-gather-zrwfs/must-gather-nljpf" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.648222 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbx78\" (UniqueName: \"kubernetes.io/projected/a572218a-ecfd-45ec-8d89-0489cb95a11b-kube-api-access-qbx78\") pod \"must-gather-nljpf\" (UID: \"a572218a-ecfd-45ec-8d89-0489cb95a11b\") " pod="openshift-must-gather-zrwfs/must-gather-nljpf" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.750435 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a572218a-ecfd-45ec-8d89-0489cb95a11b-must-gather-output\") pod \"must-gather-nljpf\" (UID: \"a572218a-ecfd-45ec-8d89-0489cb95a11b\") " pod="openshift-must-gather-zrwfs/must-gather-nljpf" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.750573 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbx78\" (UniqueName: \"kubernetes.io/projected/a572218a-ecfd-45ec-8d89-0489cb95a11b-kube-api-access-qbx78\") pod \"must-gather-nljpf\" (UID: \"a572218a-ecfd-45ec-8d89-0489cb95a11b\") " pod="openshift-must-gather-zrwfs/must-gather-nljpf" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.750875 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a572218a-ecfd-45ec-8d89-0489cb95a11b-must-gather-output\") pod \"must-gather-nljpf\" (UID: \"a572218a-ecfd-45ec-8d89-0489cb95a11b\") " pod="openshift-must-gather-zrwfs/must-gather-nljpf" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.770413 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbx78\" (UniqueName: \"kubernetes.io/projected/a572218a-ecfd-45ec-8d89-0489cb95a11b-kube-api-access-qbx78\") pod \"must-gather-nljpf\" (UID: \"a572218a-ecfd-45ec-8d89-0489cb95a11b\") " pod="openshift-must-gather-zrwfs/must-gather-nljpf" Jun 06 11:02:38 crc kubenswrapper[4911]: I0606 11:02:38.913935 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zrwfs/must-gather-nljpf" Jun 06 11:02:39 crc kubenswrapper[4911]: I0606 11:02:39.680722 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zrwfs/must-gather-nljpf"] Jun 06 11:02:40 crc kubenswrapper[4911]: I0606 11:02:40.093030 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/must-gather-nljpf" event={"ID":"a572218a-ecfd-45ec-8d89-0489cb95a11b","Type":"ContainerStarted","Data":"bb5b4a79dadd08b6bfa8c2ca8778d9b151857a0194dc622908a41003376161c8"} Jun 06 11:02:48 crc kubenswrapper[4911]: I0606 11:02:48.185757 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/must-gather-nljpf" event={"ID":"a572218a-ecfd-45ec-8d89-0489cb95a11b","Type":"ContainerStarted","Data":"d13ec06fe2fd28ee2ea2f78185994d76ad29418ee5ef90f49315ea7cb3fd1c4b"} Jun 06 11:02:48 crc kubenswrapper[4911]: I0606 11:02:48.948736 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:02:48 crc kubenswrapper[4911]: E0606 11:02:48.950076 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:02:49 crc kubenswrapper[4911]: I0606 11:02:49.203910 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/must-gather-nljpf" event={"ID":"a572218a-ecfd-45ec-8d89-0489cb95a11b","Type":"ContainerStarted","Data":"5de373ae6f1c5c021797cd5afcc29caf1fc9639afa403cc3ced3c6d8e08aff20"} Jun 06 11:02:49 crc kubenswrapper[4911]: I0606 11:02:49.227919 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zrwfs/must-gather-nljpf" podStartSLOduration=3.16047519 podStartE2EDuration="11.227899538s" podCreationTimestamp="2025-06-06 11:02:38 +0000 UTC" firstStartedPulling="2025-06-06 11:02:39.704983222 +0000 UTC m=+6570.980408775" lastFinishedPulling="2025-06-06 11:02:47.77240758 +0000 UTC m=+6579.047833123" observedRunningTime="2025-06-06 11:02:49.226372229 +0000 UTC m=+6580.501797772" watchObservedRunningTime="2025-06-06 11:02:49.227899538 +0000 UTC m=+6580.503325081" Jun 06 11:02:52 crc kubenswrapper[4911]: E0606 11:02:52.346149 4911 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.108:43626->38.129.56.108:33257: write tcp 38.129.56.108:43626->38.129.56.108:33257: write: broken pipe Jun 06 11:02:53 crc kubenswrapper[4911]: I0606 11:02:53.585377 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zrwfs/crc-debug-q8wz5"] Jun 06 11:02:53 crc kubenswrapper[4911]: I0606 11:02:53.588163 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" Jun 06 11:02:53 crc kubenswrapper[4911]: I0606 11:02:53.659385 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ad147ecb-393b-4645-b62e-6337b7cdf593-host\") pod \"crc-debug-q8wz5\" (UID: \"ad147ecb-393b-4645-b62e-6337b7cdf593\") " pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" Jun 06 11:02:53 crc kubenswrapper[4911]: I0606 11:02:53.659566 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt9ql\" (UniqueName: \"kubernetes.io/projected/ad147ecb-393b-4645-b62e-6337b7cdf593-kube-api-access-vt9ql\") pod \"crc-debug-q8wz5\" (UID: \"ad147ecb-393b-4645-b62e-6337b7cdf593\") " pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" Jun 06 11:02:53 crc kubenswrapper[4911]: I0606 11:02:53.762460 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ad147ecb-393b-4645-b62e-6337b7cdf593-host\") pod \"crc-debug-q8wz5\" (UID: \"ad147ecb-393b-4645-b62e-6337b7cdf593\") " pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" Jun 06 11:02:53 crc kubenswrapper[4911]: I0606 11:02:53.762570 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt9ql\" (UniqueName: \"kubernetes.io/projected/ad147ecb-393b-4645-b62e-6337b7cdf593-kube-api-access-vt9ql\") pod \"crc-debug-q8wz5\" (UID: \"ad147ecb-393b-4645-b62e-6337b7cdf593\") " pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" Jun 06 11:02:53 crc kubenswrapper[4911]: I0606 11:02:53.762648 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ad147ecb-393b-4645-b62e-6337b7cdf593-host\") pod \"crc-debug-q8wz5\" (UID: \"ad147ecb-393b-4645-b62e-6337b7cdf593\") " pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" Jun 06 11:02:53 crc kubenswrapper[4911]: I0606 11:02:53.797332 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt9ql\" (UniqueName: \"kubernetes.io/projected/ad147ecb-393b-4645-b62e-6337b7cdf593-kube-api-access-vt9ql\") pod \"crc-debug-q8wz5\" (UID: \"ad147ecb-393b-4645-b62e-6337b7cdf593\") " pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" Jun 06 11:02:53 crc kubenswrapper[4911]: I0606 11:02:53.919884 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" Jun 06 11:02:53 crc kubenswrapper[4911]: W0606 11:02:53.966315 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad147ecb_393b_4645_b62e_6337b7cdf593.slice/crio-301808d5f728c08427bf6edd987e3664dc8e987446f493b870a23e05bc28f970 WatchSource:0}: Error finding container 301808d5f728c08427bf6edd987e3664dc8e987446f493b870a23e05bc28f970: Status 404 returned error can't find the container with id 301808d5f728c08427bf6edd987e3664dc8e987446f493b870a23e05bc28f970 Jun 06 11:02:54 crc kubenswrapper[4911]: I0606 11:02:54.262681 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" event={"ID":"ad147ecb-393b-4645-b62e-6337b7cdf593","Type":"ContainerStarted","Data":"88bf0866b71b196c17e5f681a200bff5842a537d51ed00f90bcb4746a45a886b"} Jun 06 11:02:54 crc kubenswrapper[4911]: I0606 11:02:54.263154 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" event={"ID":"ad147ecb-393b-4645-b62e-6337b7cdf593","Type":"ContainerStarted","Data":"301808d5f728c08427bf6edd987e3664dc8e987446f493b870a23e05bc28f970"} Jun 06 11:02:54 crc kubenswrapper[4911]: I0606 11:02:54.277351 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" podStartSLOduration=1.27733224 podStartE2EDuration="1.27733224s" podCreationTimestamp="2025-06-06 11:02:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:02:54.275647526 +0000 UTC m=+6585.551073069" watchObservedRunningTime="2025-06-06 11:02:54.27733224 +0000 UTC m=+6585.552757783" Jun 06 11:02:59 crc kubenswrapper[4911]: I0606 11:02:59.959408 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:02:59 crc kubenswrapper[4911]: E0606 11:02:59.963047 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:03:02 crc kubenswrapper[4911]: I0606 11:03:02.011416 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-nv5t7"] Jun 06 11:03:02 crc kubenswrapper[4911]: I0606 11:03:02.013932 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-nv5t7" Jun 06 11:03:02 crc kubenswrapper[4911]: I0606 11:03:02.055591 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjswk\" (UniqueName: \"kubernetes.io/projected/17927e54-a8c6-4987-994d-7a572be8f3c9-kube-api-access-cjswk\") pod \"crc-debug-nv5t7\" (UID: \"17927e54-a8c6-4987-994d-7a572be8f3c9\") " pod="openstack/crc-debug-nv5t7" Jun 06 11:03:02 crc kubenswrapper[4911]: I0606 11:03:02.055902 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/17927e54-a8c6-4987-994d-7a572be8f3c9-host\") pod \"crc-debug-nv5t7\" (UID: \"17927e54-a8c6-4987-994d-7a572be8f3c9\") " pod="openstack/crc-debug-nv5t7" Jun 06 11:03:02 crc kubenswrapper[4911]: I0606 11:03:02.158012 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/17927e54-a8c6-4987-994d-7a572be8f3c9-host\") pod \"crc-debug-nv5t7\" (UID: \"17927e54-a8c6-4987-994d-7a572be8f3c9\") " pod="openstack/crc-debug-nv5t7" Jun 06 11:03:02 crc kubenswrapper[4911]: I0606 11:03:02.158072 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjswk\" (UniqueName: \"kubernetes.io/projected/17927e54-a8c6-4987-994d-7a572be8f3c9-kube-api-access-cjswk\") pod \"crc-debug-nv5t7\" (UID: \"17927e54-a8c6-4987-994d-7a572be8f3c9\") " pod="openstack/crc-debug-nv5t7" Jun 06 11:03:02 crc kubenswrapper[4911]: I0606 11:03:02.158521 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/17927e54-a8c6-4987-994d-7a572be8f3c9-host\") pod \"crc-debug-nv5t7\" (UID: \"17927e54-a8c6-4987-994d-7a572be8f3c9\") " pod="openstack/crc-debug-nv5t7" Jun 06 11:03:02 crc kubenswrapper[4911]: I0606 11:03:02.182982 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjswk\" (UniqueName: \"kubernetes.io/projected/17927e54-a8c6-4987-994d-7a572be8f3c9-kube-api-access-cjswk\") pod \"crc-debug-nv5t7\" (UID: \"17927e54-a8c6-4987-994d-7a572be8f3c9\") " pod="openstack/crc-debug-nv5t7" Jun 06 11:03:02 crc kubenswrapper[4911]: I0606 11:03:02.334619 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-nv5t7" Jun 06 11:03:03 crc kubenswrapper[4911]: I0606 11:03:03.356148 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-nv5t7" event={"ID":"17927e54-a8c6-4987-994d-7a572be8f3c9","Type":"ContainerStarted","Data":"ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668"} Jun 06 11:03:03 crc kubenswrapper[4911]: I0606 11:03:03.357439 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-nv5t7" event={"ID":"17927e54-a8c6-4987-994d-7a572be8f3c9","Type":"ContainerStarted","Data":"cb2a9a52ddffd489b9cb320e2f7d124399ad60a635ea6ad6f51ecf49fa7b355e"} Jun 06 11:03:12 crc kubenswrapper[4911]: I0606 11:03:12.949245 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:03:12 crc kubenswrapper[4911]: E0606 11:03:12.950525 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.079437 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-nv5t7" podStartSLOduration=12.07941429 podStartE2EDuration="12.07941429s" podCreationTimestamp="2025-06-06 11:03:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:03:03.375637166 +0000 UTC m=+6594.651062759" watchObservedRunningTime="2025-06-06 11:03:13.07941429 +0000 UTC m=+6604.354839853" Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.081604 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-nv5t7"] Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.081871 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-nv5t7" podUID="17927e54-a8c6-4987-994d-7a572be8f3c9" containerName="container-00" containerID="cri-o://ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668" gracePeriod=2 Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.099142 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-nv5t7"] Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.203389 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-nv5t7" Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.354530 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjswk\" (UniqueName: \"kubernetes.io/projected/17927e54-a8c6-4987-994d-7a572be8f3c9-kube-api-access-cjswk\") pod \"17927e54-a8c6-4987-994d-7a572be8f3c9\" (UID: \"17927e54-a8c6-4987-994d-7a572be8f3c9\") " Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.355126 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/17927e54-a8c6-4987-994d-7a572be8f3c9-host\") pod \"17927e54-a8c6-4987-994d-7a572be8f3c9\" (UID: \"17927e54-a8c6-4987-994d-7a572be8f3c9\") " Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.355191 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/17927e54-a8c6-4987-994d-7a572be8f3c9-host" (OuterVolumeSpecName: "host") pod "17927e54-a8c6-4987-994d-7a572be8f3c9" (UID: "17927e54-a8c6-4987-994d-7a572be8f3c9"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.355774 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/17927e54-a8c6-4987-994d-7a572be8f3c9-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.364545 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17927e54-a8c6-4987-994d-7a572be8f3c9-kube-api-access-cjswk" (OuterVolumeSpecName: "kube-api-access-cjswk") pod "17927e54-a8c6-4987-994d-7a572be8f3c9" (UID: "17927e54-a8c6-4987-994d-7a572be8f3c9"). InnerVolumeSpecName "kube-api-access-cjswk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.458543 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjswk\" (UniqueName: \"kubernetes.io/projected/17927e54-a8c6-4987-994d-7a572be8f3c9-kube-api-access-cjswk\") on node \"crc\" DevicePath \"\"" Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.481207 4911 generic.go:334] "Generic (PLEG): container finished" podID="17927e54-a8c6-4987-994d-7a572be8f3c9" containerID="ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668" exitCode=0 Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.481272 4911 scope.go:117] "RemoveContainer" containerID="ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668" Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.481338 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-nv5t7" Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.513865 4911 scope.go:117] "RemoveContainer" containerID="ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668" Jun 06 11:03:13 crc kubenswrapper[4911]: E0606 11:03:13.514640 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668\": container with ID starting with ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668 not found: ID does not exist" containerID="ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668" Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.514717 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668"} err="failed to get container status \"ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668\": rpc error: code = NotFound desc = could not find container \"ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668\": container with ID starting with ded570d5ce6c3078da154425d8992246a540d0c3bc368efdba2d1303b989b668 not found: ID does not exist" Jun 06 11:03:13 crc kubenswrapper[4911]: I0606 11:03:13.961847 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17927e54-a8c6-4987-994d-7a572be8f3c9" path="/var/lib/kubelet/pods/17927e54-a8c6-4987-994d-7a572be8f3c9/volumes" Jun 06 11:03:26 crc kubenswrapper[4911]: I0606 11:03:26.948257 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:03:26 crc kubenswrapper[4911]: E0606 11:03:26.949118 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:03:41 crc kubenswrapper[4911]: I0606 11:03:41.948372 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:03:41 crc kubenswrapper[4911]: E0606 11:03:41.949287 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.002568 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-55rwb"] Jun 06 11:03:52 crc kubenswrapper[4911]: E0606 11:03:52.004161 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17927e54-a8c6-4987-994d-7a572be8f3c9" containerName="container-00" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.004181 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="17927e54-a8c6-4987-994d-7a572be8f3c9" containerName="container-00" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.004519 4911 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="17927e54-a8c6-4987-994d-7a572be8f3c9" containerName="container-00" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.006405 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.014498 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-55rwb"] Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.117108 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-catalog-content\") pod \"redhat-marketplace-55rwb\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.117258 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpd4j\" (UniqueName: \"kubernetes.io/projected/de8724e2-bbe9-420a-9a2c-87b45d80211a-kube-api-access-fpd4j\") pod \"redhat-marketplace-55rwb\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.117387 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-utilities\") pod \"redhat-marketplace-55rwb\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.220529 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-utilities\") pod \"redhat-marketplace-55rwb\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.220710 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-catalog-content\") pod \"redhat-marketplace-55rwb\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.220796 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpd4j\" (UniqueName: \"kubernetes.io/projected/de8724e2-bbe9-420a-9a2c-87b45d80211a-kube-api-access-fpd4j\") pod \"redhat-marketplace-55rwb\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.221445 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-utilities\") pod \"redhat-marketplace-55rwb\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.221655 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-catalog-content\") pod 
\"redhat-marketplace-55rwb\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.247869 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpd4j\" (UniqueName: \"kubernetes.io/projected/de8724e2-bbe9-420a-9a2c-87b45d80211a-kube-api-access-fpd4j\") pod \"redhat-marketplace-55rwb\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:03:52 crc kubenswrapper[4911]: I0606 11:03:52.337308 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:03:53 crc kubenswrapper[4911]: I0606 11:03:53.113644 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-55rwb"] Jun 06 11:03:53 crc kubenswrapper[4911]: I0606 11:03:53.957054 4911 generic.go:334] "Generic (PLEG): container finished" podID="de8724e2-bbe9-420a-9a2c-87b45d80211a" containerID="e8eb71724315ce551cda547716649eb1fbf8dddf58383f3b959671078188df7a" exitCode=0 Jun 06 11:03:53 crc kubenswrapper[4911]: I0606 11:03:53.959893 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-55rwb" event={"ID":"de8724e2-bbe9-420a-9a2c-87b45d80211a","Type":"ContainerDied","Data":"e8eb71724315ce551cda547716649eb1fbf8dddf58383f3b959671078188df7a"} Jun 06 11:03:53 crc kubenswrapper[4911]: I0606 11:03:53.959953 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-55rwb" event={"ID":"de8724e2-bbe9-420a-9a2c-87b45d80211a","Type":"ContainerStarted","Data":"b00f22a7d2171b3c609013c5dcd90c73c710c99b4f3c9f572379e34c64418615"} Jun 06 11:03:53 crc kubenswrapper[4911]: I0606 11:03:53.960290 4911 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.204046 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z8dls"] Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.208298 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.218610 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z8dls"] Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.274188 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-utilities\") pod \"certified-operators-z8dls\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.274276 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpc89\" (UniqueName: \"kubernetes.io/projected/ccf3563f-0992-4859-b5c7-6c3a173d8d13-kube-api-access-vpc89\") pod \"certified-operators-z8dls\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.274767 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-catalog-content\") pod \"certified-operators-z8dls\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.377832 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-catalog-content\") pod \"certified-operators-z8dls\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.377976 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-utilities\") pod \"certified-operators-z8dls\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.378015 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpc89\" (UniqueName: \"kubernetes.io/projected/ccf3563f-0992-4859-b5c7-6c3a173d8d13-kube-api-access-vpc89\") pod \"certified-operators-z8dls\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.378639 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-utilities\") pod \"certified-operators-z8dls\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.379483 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-catalog-content\") pod \"certified-operators-z8dls\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.401396 4911 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/redhat-operators-rhvhl"] Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.404043 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.416828 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpc89\" (UniqueName: \"kubernetes.io/projected/ccf3563f-0992-4859-b5c7-6c3a173d8d13-kube-api-access-vpc89\") pod \"certified-operators-z8dls\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.431835 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rhvhl"] Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.480740 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-catalog-content\") pod \"redhat-operators-rhvhl\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.480801 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfrl2\" (UniqueName: \"kubernetes.io/projected/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-kube-api-access-cfrl2\") pod \"redhat-operators-rhvhl\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.480856 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-utilities\") pod \"redhat-operators-rhvhl\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.541712 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.584628 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-catalog-content\") pod \"redhat-operators-rhvhl\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.585120 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-catalog-content\") pod \"redhat-operators-rhvhl\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.585169 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfrl2\" (UniqueName: \"kubernetes.io/projected/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-kube-api-access-cfrl2\") pod \"redhat-operators-rhvhl\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.585640 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-utilities\") pod \"redhat-operators-rhvhl\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.585928 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-utilities\") pod \"redhat-operators-rhvhl\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.612126 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfrl2\" (UniqueName: \"kubernetes.io/projected/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-kube-api-access-cfrl2\") pod \"redhat-operators-rhvhl\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:03:54 crc kubenswrapper[4911]: I0606 11:03:54.819861 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:03:55 crc kubenswrapper[4911]: I0606 11:03:55.447704 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z8dls"] Jun 06 11:03:55 crc kubenswrapper[4911]: W0606 11:03:55.879918 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb93acd8_cd7d_4a5e_bdc2_f55222a7a4ac.slice/crio-be89756a9b0b55d403b8404c9eae64c8bc79a04f1a5dc91c706fcb53748571dc WatchSource:0}: Error finding container be89756a9b0b55d403b8404c9eae64c8bc79a04f1a5dc91c706fcb53748571dc: Status 404 returned error can't find the container with id be89756a9b0b55d403b8404c9eae64c8bc79a04f1a5dc91c706fcb53748571dc Jun 06 11:03:55 crc kubenswrapper[4911]: I0606 11:03:55.880068 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rhvhl"] Jun 06 11:03:56 crc kubenswrapper[4911]: I0606 11:03:56.048970 4911 generic.go:334] "Generic (PLEG): container finished" podID="de8724e2-bbe9-420a-9a2c-87b45d80211a" containerID="d8ac1eb56020b8483c3c79bd3f5231612e88b4982fd9bffb1b8294dc2e37e58e" exitCode=0 Jun 06 11:03:56 crc kubenswrapper[4911]: I0606 11:03:56.049206 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-55rwb" event={"ID":"de8724e2-bbe9-420a-9a2c-87b45d80211a","Type":"ContainerDied","Data":"d8ac1eb56020b8483c3c79bd3f5231612e88b4982fd9bffb1b8294dc2e37e58e"} Jun 06 11:03:56 crc kubenswrapper[4911]: I0606 11:03:56.053233 4911 generic.go:334] "Generic (PLEG): container finished" podID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" containerID="9837e01600ffd6aae3e76209a58fa7f4bb388d29429f1699e8fc1821dc7f8580" exitCode=0 Jun 06 11:03:56 crc kubenswrapper[4911]: I0606 11:03:56.053322 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8dls" event={"ID":"ccf3563f-0992-4859-b5c7-6c3a173d8d13","Type":"ContainerDied","Data":"9837e01600ffd6aae3e76209a58fa7f4bb388d29429f1699e8fc1821dc7f8580"} Jun 06 11:03:56 crc kubenswrapper[4911]: I0606 11:03:56.053664 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8dls" event={"ID":"ccf3563f-0992-4859-b5c7-6c3a173d8d13","Type":"ContainerStarted","Data":"ff798f8418adce0888f6a81b931c438b7684b3eab1d8378af694bf0aad56cfba"} Jun 06 11:03:56 crc kubenswrapper[4911]: I0606 11:03:56.055626 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhvhl" event={"ID":"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac","Type":"ContainerStarted","Data":"be89756a9b0b55d403b8404c9eae64c8bc79a04f1a5dc91c706fcb53748571dc"} Jun 06 11:03:56 crc kubenswrapper[4911]: I0606 11:03:56.948341 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:03:56 crc kubenswrapper[4911]: E0606 11:03:56.949365 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:03:57 crc kubenswrapper[4911]: I0606 11:03:57.078686 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-55rwb" event={"ID":"de8724e2-bbe9-420a-9a2c-87b45d80211a","Type":"ContainerStarted","Data":"aad6616089c4d7c8004879baa9a43af3ad265400a0c0ad034b33330826ea9e7b"} Jun 06 11:03:57 crc kubenswrapper[4911]: I0606 11:03:57.082969 4911 generic.go:334] "Generic (PLEG): container finished" podID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" containerID="0ce6a4ca1de88d04236d72ddb4e02e614798f2d6b3fd6993c61df7f9a78aeb0e" exitCode=0 Jun 06 11:03:57 crc kubenswrapper[4911]: I0606 11:03:57.083045 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhvhl" event={"ID":"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac","Type":"ContainerDied","Data":"0ce6a4ca1de88d04236d72ddb4e02e614798f2d6b3fd6993c61df7f9a78aeb0e"} Jun 06 11:03:57 crc kubenswrapper[4911]: I0606 11:03:57.109970 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-55rwb" podStartSLOduration=3.5195046420000002 podStartE2EDuration="6.109941494s" podCreationTimestamp="2025-06-06 11:03:51 +0000 UTC" firstStartedPulling="2025-06-06 11:03:53.95998323 +0000 UTC m=+6645.235408763" lastFinishedPulling="2025-06-06 11:03:56.550420072 +0000 UTC m=+6647.825845615" observedRunningTime="2025-06-06 11:03:57.105054027 +0000 UTC m=+6648.380479560" watchObservedRunningTime="2025-06-06 11:03:57.109941494 +0000 UTC m=+6648.385367037" Jun 06 11:03:58 crc kubenswrapper[4911]: I0606 11:03:58.104060 4911 generic.go:334] "Generic (PLEG): container finished" podID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" containerID="86a5809de2ee6f1b5516821e58669f0081fa8beef3d97095631eba553545f657" exitCode=0 Jun 06 11:03:58 crc kubenswrapper[4911]: I0606 11:03:58.104664 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8dls" event={"ID":"ccf3563f-0992-4859-b5c7-6c3a173d8d13","Type":"ContainerDied","Data":"86a5809de2ee6f1b5516821e58669f0081fa8beef3d97095631eba553545f657"} Jun 06 11:03:59 crc kubenswrapper[4911]: I0606 11:03:59.124475 4911 generic.go:334] "Generic (PLEG): container finished" podID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" containerID="d6bdf1060e70bb8960dbab52077d93163254a4118356e746fc319893f4a8d974" exitCode=0 Jun 06 11:03:59 crc kubenswrapper[4911]: I0606 11:03:59.124591 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhvhl" event={"ID":"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac","Type":"ContainerDied","Data":"d6bdf1060e70bb8960dbab52077d93163254a4118356e746fc319893f4a8d974"} Jun 06 11:03:59 crc kubenswrapper[4911]: I0606 11:03:59.129514 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8dls" event={"ID":"ccf3563f-0992-4859-b5c7-6c3a173d8d13","Type":"ContainerStarted","Data":"4eb55e8c8b38c6dc36a907e2fc312f37830cbb4f547bd536c389e3737d57c705"} Jun 06 11:03:59 crc kubenswrapper[4911]: I0606 11:03:59.172700 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z8dls" podStartSLOduration=2.612808671 podStartE2EDuration="5.172670254s" podCreationTimestamp="2025-06-06 11:03:54 +0000 UTC" firstStartedPulling="2025-06-06 11:03:56.058356661 +0000 UTC m=+6647.333782204" lastFinishedPulling="2025-06-06 11:03:58.618218234 +0000 UTC m=+6649.893643787" observedRunningTime="2025-06-06 11:03:59.165274063 +0000 UTC m=+6650.440699616" watchObservedRunningTime="2025-06-06 11:03:59.172670254 +0000 UTC m=+6650.448095797" Jun 06 11:04:00 crc 
kubenswrapper[4911]: I0606 11:04:00.145430 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhvhl" event={"ID":"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac","Type":"ContainerStarted","Data":"b12eee3d7819a431847d23600f77bfd18dcb256af0508d083c096033f02b4797"} Jun 06 11:04:00 crc kubenswrapper[4911]: I0606 11:04:00.176949 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rhvhl" podStartSLOduration=3.453120492 podStartE2EDuration="6.176921685s" podCreationTimestamp="2025-06-06 11:03:54 +0000 UTC" firstStartedPulling="2025-06-06 11:03:57.099835133 +0000 UTC m=+6648.375260666" lastFinishedPulling="2025-06-06 11:03:59.823636316 +0000 UTC m=+6651.099061859" observedRunningTime="2025-06-06 11:04:00.168042586 +0000 UTC m=+6651.443468149" watchObservedRunningTime="2025-06-06 11:04:00.176921685 +0000 UTC m=+6651.452347228" Jun 06 11:04:01 crc kubenswrapper[4911]: I0606 11:04:01.611395 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-mvxcd"] Jun 06 11:04:01 crc kubenswrapper[4911]: I0606 11:04:01.614827 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-mvxcd" Jun 06 11:04:01 crc kubenswrapper[4911]: I0606 11:04:01.684270 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/89377167-f203-4492-8bc3-a8bdc8037ff0-host\") pod \"crc-debug-mvxcd\" (UID: \"89377167-f203-4492-8bc3-a8bdc8037ff0\") " pod="openstack/crc-debug-mvxcd" Jun 06 11:04:01 crc kubenswrapper[4911]: I0606 11:04:01.684381 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwbjx\" (UniqueName: \"kubernetes.io/projected/89377167-f203-4492-8bc3-a8bdc8037ff0-kube-api-access-dwbjx\") pod \"crc-debug-mvxcd\" (UID: \"89377167-f203-4492-8bc3-a8bdc8037ff0\") " pod="openstack/crc-debug-mvxcd" Jun 06 11:04:01 crc kubenswrapper[4911]: I0606 11:04:01.787240 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/89377167-f203-4492-8bc3-a8bdc8037ff0-host\") pod \"crc-debug-mvxcd\" (UID: \"89377167-f203-4492-8bc3-a8bdc8037ff0\") " pod="openstack/crc-debug-mvxcd" Jun 06 11:04:01 crc kubenswrapper[4911]: I0606 11:04:01.787353 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwbjx\" (UniqueName: \"kubernetes.io/projected/89377167-f203-4492-8bc3-a8bdc8037ff0-kube-api-access-dwbjx\") pod \"crc-debug-mvxcd\" (UID: \"89377167-f203-4492-8bc3-a8bdc8037ff0\") " pod="openstack/crc-debug-mvxcd" Jun 06 11:04:01 crc kubenswrapper[4911]: I0606 11:04:01.787787 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/89377167-f203-4492-8bc3-a8bdc8037ff0-host\") pod \"crc-debug-mvxcd\" (UID: \"89377167-f203-4492-8bc3-a8bdc8037ff0\") " pod="openstack/crc-debug-mvxcd" Jun 06 11:04:01 crc kubenswrapper[4911]: I0606 11:04:01.817880 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwbjx\" (UniqueName: \"kubernetes.io/projected/89377167-f203-4492-8bc3-a8bdc8037ff0-kube-api-access-dwbjx\") pod \"crc-debug-mvxcd\" (UID: \"89377167-f203-4492-8bc3-a8bdc8037ff0\") " pod="openstack/crc-debug-mvxcd" Jun 06 11:04:01 crc kubenswrapper[4911]: I0606 11:04:01.941059 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mvxcd" Jun 06 11:04:01 crc kubenswrapper[4911]: W0606 11:04:01.977004 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89377167_f203_4492_8bc3_a8bdc8037ff0.slice/crio-16b61cc5585d093e04639ef87b16ba52c8f4768872fef0c06c78f027dad35014 WatchSource:0}: Error finding container 16b61cc5585d093e04639ef87b16ba52c8f4768872fef0c06c78f027dad35014: Status 404 returned error can't find the container with id 16b61cc5585d093e04639ef87b16ba52c8f4768872fef0c06c78f027dad35014 Jun 06 11:04:02 crc kubenswrapper[4911]: I0606 11:04:02.169605 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mvxcd" event={"ID":"89377167-f203-4492-8bc3-a8bdc8037ff0","Type":"ContainerStarted","Data":"16b61cc5585d093e04639ef87b16ba52c8f4768872fef0c06c78f027dad35014"} Jun 06 11:04:02 crc kubenswrapper[4911]: I0606 11:04:02.338127 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:04:02 crc kubenswrapper[4911]: I0606 11:04:02.340877 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:04:02 crc kubenswrapper[4911]: I0606 11:04:02.416370 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:04:03 crc kubenswrapper[4911]: I0606 11:04:03.182341 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-mvxcd" event={"ID":"89377167-f203-4492-8bc3-a8bdc8037ff0","Type":"ContainerStarted","Data":"8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b"} Jun 06 11:04:03 crc kubenswrapper[4911]: I0606 11:04:03.241619 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-mvxcd" podStartSLOduration=2.241580507 podStartE2EDuration="2.241580507s" podCreationTimestamp="2025-06-06 11:04:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:04:03.204819818 +0000 UTC m=+6654.480245381" watchObservedRunningTime="2025-06-06 11:04:03.241580507 +0000 UTC m=+6654.517006050" Jun 06 11:04:03 crc kubenswrapper[4911]: I0606 11:04:03.245681 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:04:04 crc kubenswrapper[4911]: I0606 11:04:04.543558 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:04:04 crc kubenswrapper[4911]: I0606 11:04:04.544423 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:04:04 crc kubenswrapper[4911]: I0606 11:04:04.626030 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:04:04 crc kubenswrapper[4911]: I0606 11:04:04.821248 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:04:04 crc kubenswrapper[4911]: I0606 11:04:04.821392 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:04:04 crc kubenswrapper[4911]: I0606 11:04:04.886053 4911 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:04:05 crc kubenswrapper[4911]: I0606 11:04:05.185878 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-55rwb"] Jun 06 11:04:05 crc kubenswrapper[4911]: I0606 11:04:05.261056 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:04:05 crc kubenswrapper[4911]: I0606 11:04:05.266283 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:04:06 crc kubenswrapper[4911]: I0606 11:04:06.217927 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-55rwb" podUID="de8724e2-bbe9-420a-9a2c-87b45d80211a" containerName="registry-server" containerID="cri-o://aad6616089c4d7c8004879baa9a43af3ad265400a0c0ad034b33330826ea9e7b" gracePeriod=2 Jun 06 11:04:06 crc kubenswrapper[4911]: I0606 11:04:06.987855 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z8dls"] Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.230007 4911 generic.go:334] "Generic (PLEG): container finished" podID="de8724e2-bbe9-420a-9a2c-87b45d80211a" containerID="aad6616089c4d7c8004879baa9a43af3ad265400a0c0ad034b33330826ea9e7b" exitCode=0 Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.230371 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z8dls" podUID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" containerName="registry-server" containerID="cri-o://4eb55e8c8b38c6dc36a907e2fc312f37830cbb4f547bd536c389e3737d57c705" gracePeriod=2 Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.230841 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-55rwb" event={"ID":"de8724e2-bbe9-420a-9a2c-87b45d80211a","Type":"ContainerDied","Data":"aad6616089c4d7c8004879baa9a43af3ad265400a0c0ad034b33330826ea9e7b"} Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.652244 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rhvhl"] Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.674611 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.777928 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-catalog-content\") pod \"de8724e2-bbe9-420a-9a2c-87b45d80211a\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.778075 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-utilities\") pod \"de8724e2-bbe9-420a-9a2c-87b45d80211a\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.778137 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpd4j\" (UniqueName: \"kubernetes.io/projected/de8724e2-bbe9-420a-9a2c-87b45d80211a-kube-api-access-fpd4j\") pod \"de8724e2-bbe9-420a-9a2c-87b45d80211a\" (UID: \"de8724e2-bbe9-420a-9a2c-87b45d80211a\") " Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.780222 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-utilities" (OuterVolumeSpecName: "utilities") pod "de8724e2-bbe9-420a-9a2c-87b45d80211a" (UID: "de8724e2-bbe9-420a-9a2c-87b45d80211a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.785447 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de8724e2-bbe9-420a-9a2c-87b45d80211a-kube-api-access-fpd4j" (OuterVolumeSpecName: "kube-api-access-fpd4j") pod "de8724e2-bbe9-420a-9a2c-87b45d80211a" (UID: "de8724e2-bbe9-420a-9a2c-87b45d80211a"). InnerVolumeSpecName "kube-api-access-fpd4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.798599 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de8724e2-bbe9-420a-9a2c-87b45d80211a" (UID: "de8724e2-bbe9-420a-9a2c-87b45d80211a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.881194 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.881247 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpd4j\" (UniqueName: \"kubernetes.io/projected/de8724e2-bbe9-420a-9a2c-87b45d80211a-kube-api-access-fpd4j\") on node \"crc\" DevicePath \"\"" Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.881265 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de8724e2-bbe9-420a-9a2c-87b45d80211a-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 11:04:07 crc kubenswrapper[4911]: I0606 11:04:07.948129 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:04:07 crc kubenswrapper[4911]: E0606 11:04:07.948498 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:04:08 crc kubenswrapper[4911]: I0606 11:04:08.247432 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-55rwb" event={"ID":"de8724e2-bbe9-420a-9a2c-87b45d80211a","Type":"ContainerDied","Data":"b00f22a7d2171b3c609013c5dcd90c73c710c99b4f3c9f572379e34c64418615"} Jun 06 11:04:08 crc kubenswrapper[4911]: I0606 11:04:08.247515 4911 scope.go:117] "RemoveContainer" containerID="aad6616089c4d7c8004879baa9a43af3ad265400a0c0ad034b33330826ea9e7b" Jun 06 11:04:08 crc kubenswrapper[4911]: I0606 11:04:08.247828 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-55rwb" Jun 06 11:04:08 crc kubenswrapper[4911]: I0606 11:04:08.259844 4911 generic.go:334] "Generic (PLEG): container finished" podID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" containerID="4eb55e8c8b38c6dc36a907e2fc312f37830cbb4f547bd536c389e3737d57c705" exitCode=0 Jun 06 11:04:08 crc kubenswrapper[4911]: I0606 11:04:08.260464 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rhvhl" podUID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" containerName="registry-server" containerID="cri-o://b12eee3d7819a431847d23600f77bfd18dcb256af0508d083c096033f02b4797" gracePeriod=2 Jun 06 11:04:08 crc kubenswrapper[4911]: I0606 11:04:08.259976 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8dls" event={"ID":"ccf3563f-0992-4859-b5c7-6c3a173d8d13","Type":"ContainerDied","Data":"4eb55e8c8b38c6dc36a907e2fc312f37830cbb4f547bd536c389e3737d57c705"} Jun 06 11:04:08 crc kubenswrapper[4911]: I0606 11:04:08.300560 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-55rwb"] Jun 06 11:04:08 crc kubenswrapper[4911]: I0606 11:04:08.307919 4911 scope.go:117] "RemoveContainer" containerID="d8ac1eb56020b8483c3c79bd3f5231612e88b4982fd9bffb1b8294dc2e37e58e" Jun 06 11:04:08 crc kubenswrapper[4911]: I0606 11:04:08.317629 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-55rwb"] Jun 06 11:04:08 crc kubenswrapper[4911]: I0606 11:04:08.471758 4911 scope.go:117] "RemoveContainer" containerID="e8eb71724315ce551cda547716649eb1fbf8dddf58383f3b959671078188df7a" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.046737 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.111218 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpc89\" (UniqueName: \"kubernetes.io/projected/ccf3563f-0992-4859-b5c7-6c3a173d8d13-kube-api-access-vpc89\") pod \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.111421 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-utilities\") pod \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.111497 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-catalog-content\") pod \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\" (UID: \"ccf3563f-0992-4859-b5c7-6c3a173d8d13\") " Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.112570 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-utilities" (OuterVolumeSpecName: "utilities") pod "ccf3563f-0992-4859-b5c7-6c3a173d8d13" (UID: "ccf3563f-0992-4859-b5c7-6c3a173d8d13"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.123456 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccf3563f-0992-4859-b5c7-6c3a173d8d13-kube-api-access-vpc89" (OuterVolumeSpecName: "kube-api-access-vpc89") pod "ccf3563f-0992-4859-b5c7-6c3a173d8d13" (UID: "ccf3563f-0992-4859-b5c7-6c3a173d8d13"). InnerVolumeSpecName "kube-api-access-vpc89". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.169686 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ccf3563f-0992-4859-b5c7-6c3a173d8d13" (UID: "ccf3563f-0992-4859-b5c7-6c3a173d8d13"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.215613 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpc89\" (UniqueName: \"kubernetes.io/projected/ccf3563f-0992-4859-b5c7-6c3a173d8d13-kube-api-access-vpc89\") on node \"crc\" DevicePath \"\"" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.215657 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.215668 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccf3563f-0992-4859-b5c7-6c3a173d8d13-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.275233 4911 generic.go:334] "Generic (PLEG): container finished" podID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" containerID="b12eee3d7819a431847d23600f77bfd18dcb256af0508d083c096033f02b4797" exitCode=0 Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.275316 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhvhl" event={"ID":"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac","Type":"ContainerDied","Data":"b12eee3d7819a431847d23600f77bfd18dcb256af0508d083c096033f02b4797"} Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.281306 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8dls" event={"ID":"ccf3563f-0992-4859-b5c7-6c3a173d8d13","Type":"ContainerDied","Data":"ff798f8418adce0888f6a81b931c438b7684b3eab1d8378af694bf0aad56cfba"} Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.281372 4911 scope.go:117] "RemoveContainer" containerID="4eb55e8c8b38c6dc36a907e2fc312f37830cbb4f547bd536c389e3737d57c705" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.281430 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z8dls" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.323353 4911 scope.go:117] "RemoveContainer" containerID="86a5809de2ee6f1b5516821e58669f0081fa8beef3d97095631eba553545f657" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.352775 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z8dls"] Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.394870 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z8dls"] Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.444253 4911 scope.go:117] "RemoveContainer" containerID="9837e01600ffd6aae3e76209a58fa7f4bb388d29429f1699e8fc1821dc7f8580" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.615713 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.735309 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfrl2\" (UniqueName: \"kubernetes.io/projected/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-kube-api-access-cfrl2\") pod \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.735453 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-catalog-content\") pod \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.735629 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-utilities\") pod \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\" (UID: \"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac\") " Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.738058 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-utilities" (OuterVolumeSpecName: "utilities") pod "eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" (UID: "eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.747421 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-kube-api-access-cfrl2" (OuterVolumeSpecName: "kube-api-access-cfrl2") pod "eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" (UID: "eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac"). InnerVolumeSpecName "kube-api-access-cfrl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.807571 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" (UID: "eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.840791 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.840843 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.840861 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfrl2\" (UniqueName: \"kubernetes.io/projected/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac-kube-api-access-cfrl2\") on node \"crc\" DevicePath \"\"" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.982900 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" path="/var/lib/kubelet/pods/ccf3563f-0992-4859-b5c7-6c3a173d8d13/volumes" Jun 06 11:04:09 crc kubenswrapper[4911]: I0606 11:04:09.983917 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de8724e2-bbe9-420a-9a2c-87b45d80211a" path="/var/lib/kubelet/pods/de8724e2-bbe9-420a-9a2c-87b45d80211a/volumes" Jun 06 11:04:10 crc kubenswrapper[4911]: I0606 11:04:10.300233 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rhvhl" event={"ID":"eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac","Type":"ContainerDied","Data":"be89756a9b0b55d403b8404c9eae64c8bc79a04f1a5dc91c706fcb53748571dc"} Jun 06 11:04:10 crc kubenswrapper[4911]: I0606 11:04:10.300336 4911 scope.go:117] "RemoveContainer" containerID="b12eee3d7819a431847d23600f77bfd18dcb256af0508d083c096033f02b4797" Jun 06 11:04:10 crc kubenswrapper[4911]: I0606 11:04:10.302027 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rhvhl" Jun 06 11:04:10 crc kubenswrapper[4911]: I0606 11:04:10.322637 4911 scope.go:117] "RemoveContainer" containerID="d6bdf1060e70bb8960dbab52077d93163254a4118356e746fc319893f4a8d974" Jun 06 11:04:10 crc kubenswrapper[4911]: I0606 11:04:10.333810 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rhvhl"] Jun 06 11:04:10 crc kubenswrapper[4911]: I0606 11:04:10.350644 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rhvhl"] Jun 06 11:04:10 crc kubenswrapper[4911]: I0606 11:04:10.356750 4911 scope.go:117] "RemoveContainer" containerID="0ce6a4ca1de88d04236d72ddb4e02e614798f2d6b3fd6993c61df7f9a78aeb0e" Jun 06 11:04:11 crc kubenswrapper[4911]: I0606 11:04:11.958654 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54cd458d76-j6txj_201d1114-599e-4139-99e5-29e5cd900b81/barbican-api/0.log" Jun 06 11:04:11 crc kubenswrapper[4911]: I0606 11:04:11.962503 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" path="/var/lib/kubelet/pods/eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac/volumes" Jun 06 11:04:12 crc kubenswrapper[4911]: I0606 11:04:12.081687 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54cd458d76-j6txj_201d1114-599e-4139-99e5-29e5cd900b81/barbican-api-log/0.log" Jun 06 11:04:12 crc kubenswrapper[4911]: I0606 11:04:12.240494 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-64cdf9799b-mrpz2_5bfc6577-537e-444b-aeab-e3f12ef96053/barbican-keystone-listener/0.log" Jun 06 11:04:12 crc kubenswrapper[4911]: I0606 11:04:12.664219 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-64cdf9799b-mrpz2_5bfc6577-537e-444b-aeab-e3f12ef96053/barbican-keystone-listener-log/0.log" Jun 06 11:04:12 crc kubenswrapper[4911]: I0606 11:04:12.787334 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b8576d7c9-bckww_8b8904bf-3086-4a57-b18c-2f113621fc14/barbican-worker/0.log" Jun 06 11:04:12 crc kubenswrapper[4911]: I0606 11:04:12.884753 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-mvxcd"] Jun 06 11:04:12 crc kubenswrapper[4911]: I0606 11:04:12.885261 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-mvxcd" podUID="89377167-f203-4492-8bc3-a8bdc8037ff0" containerName="container-00" containerID="cri-o://8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b" gracePeriod=2 Jun 06 11:04:12 crc kubenswrapper[4911]: I0606 11:04:12.896440 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-mvxcd"] Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.051922 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b8576d7c9-bckww_8b8904bf-3086-4a57-b18c-2f113621fc14/barbican-worker-log/0.log" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.110883 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mvxcd" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.211462 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-78t24_a4c28cde-8e9e-469d-9960-ea174038d9ef/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.239727 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwbjx\" (UniqueName: \"kubernetes.io/projected/89377167-f203-4492-8bc3-a8bdc8037ff0-kube-api-access-dwbjx\") pod \"89377167-f203-4492-8bc3-a8bdc8037ff0\" (UID: \"89377167-f203-4492-8bc3-a8bdc8037ff0\") " Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.240050 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/89377167-f203-4492-8bc3-a8bdc8037ff0-host\") pod \"89377167-f203-4492-8bc3-a8bdc8037ff0\" (UID: \"89377167-f203-4492-8bc3-a8bdc8037ff0\") " Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.240402 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89377167-f203-4492-8bc3-a8bdc8037ff0-host" (OuterVolumeSpecName: "host") pod "89377167-f203-4492-8bc3-a8bdc8037ff0" (UID: "89377167-f203-4492-8bc3-a8bdc8037ff0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.240583 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/89377167-f203-4492-8bc3-a8bdc8037ff0-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.254419 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89377167-f203-4492-8bc3-a8bdc8037ff0-kube-api-access-dwbjx" (OuterVolumeSpecName: "kube-api-access-dwbjx") pod "89377167-f203-4492-8bc3-a8bdc8037ff0" (UID: "89377167-f203-4492-8bc3-a8bdc8037ff0"). InnerVolumeSpecName "kube-api-access-dwbjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.335164 4911 generic.go:334] "Generic (PLEG): container finished" podID="89377167-f203-4492-8bc3-a8bdc8037ff0" containerID="8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b" exitCode=0 Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.335257 4911 scope.go:117] "RemoveContainer" containerID="8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.335279 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-mvxcd" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.346443 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwbjx\" (UniqueName: \"kubernetes.io/projected/89377167-f203-4492-8bc3-a8bdc8037ff0-kube-api-access-dwbjx\") on node \"crc\" DevicePath \"\"" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.358011 4911 scope.go:117] "RemoveContainer" containerID="8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b" Jun 06 11:04:13 crc kubenswrapper[4911]: E0606 11:04:13.358731 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b\": container with ID starting with 8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b not found: ID does not exist" containerID="8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.358782 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b"} err="failed to get container status \"8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b\": rpc error: code = NotFound desc = could not find container \"8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b\": container with ID starting with 8145f39477184bd285332b5236e9ac0e53a9fa6fca16634d216bd110eea7124b not found: ID does not exist" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.374343 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_27c4a155-3275-4a3e-9d1b-18cfb92f7d99/ceilometer-central-agent/0.log" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.533311 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_27c4a155-3275-4a3e-9d1b-18cfb92f7d99/ceilometer-notification-agent/0.log" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.672857 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_27c4a155-3275-4a3e-9d1b-18cfb92f7d99/proxy-httpd/0.log" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.679586 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_27c4a155-3275-4a3e-9d1b-18cfb92f7d99/sg-core/0.log" Jun 06 11:04:13 crc kubenswrapper[4911]: I0606 11:04:13.985461 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89377167-f203-4492-8bc3-a8bdc8037ff0" path="/var/lib/kubelet/pods/89377167-f203-4492-8bc3-a8bdc8037ff0/volumes" Jun 06 11:04:14 crc kubenswrapper[4911]: I0606 11:04:14.100062 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph_fcb703c1-a42a-4e53-8bcb-6279a76856c1/ceph/0.log" Jun 06 11:04:14 crc kubenswrapper[4911]: I0606 11:04:14.498238 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ceed9593-a84f-422c-9eb1-b5ab24bbb3b6/cinder-api-log/0.log" Jun 06 11:04:14 crc kubenswrapper[4911]: I0606 11:04:14.507808 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ceed9593-a84f-422c-9eb1-b5ab24bbb3b6/cinder-api/0.log" Jun 06 11:04:14 crc kubenswrapper[4911]: I0606 11:04:14.668890 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_546d4fc3-dc24-45d0-b6dd-9237e2e648fd/cinder-backup/0.log" Jun 06 11:04:14 crc kubenswrapper[4911]: I0606 
11:04:14.688216 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_546d4fc3-dc24-45d0-b6dd-9237e2e648fd/probe/0.log" Jun 06 11:04:14 crc kubenswrapper[4911]: I0606 11:04:14.856033 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a/cinder-scheduler/0.log" Jun 06 11:04:14 crc kubenswrapper[4911]: I0606 11:04:14.975483 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a3c3ebe6-0cf1-4da3-b6d2-cec759fcc28a/probe/0.log" Jun 06 11:04:15 crc kubenswrapper[4911]: I0606 11:04:15.147269 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_4f4fe571-77a4-4d40-843e-f5ed6091158e/cinder-volume/0.log" Jun 06 11:04:15 crc kubenswrapper[4911]: I0606 11:04:15.263610 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_4f4fe571-77a4-4d40-843e-f5ed6091158e/probe/0.log" Jun 06 11:04:15 crc kubenswrapper[4911]: I0606 11:04:15.422767 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-skcz9_23c4a939-99a1-4995-9bff-b48095f87e61/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:15 crc kubenswrapper[4911]: I0606 11:04:15.676075 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-z46kw_4856f749-b866-41cf-bdc2-0a5c8b2fce43/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:15 crc kubenswrapper[4911]: I0606 11:04:15.963195 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-68db4d6659-xfwpc_32f99d86-aac4-4887-a65e-c05e81a506b0/init/0.log" Jun 06 11:04:16 crc kubenswrapper[4911]: I0606 11:04:16.237426 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-68db4d6659-xfwpc_32f99d86-aac4-4887-a65e-c05e81a506b0/init/0.log" Jun 06 11:04:16 crc kubenswrapper[4911]: I0606 11:04:16.365405 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-fsdsq_aac6d9d8-4525-494e-8866-215dba01a06c/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:16 crc kubenswrapper[4911]: I0606 11:04:16.434515 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-68db4d6659-xfwpc_32f99d86-aac4-4887-a65e-c05e81a506b0/dnsmasq-dns/0.log" Jun 06 11:04:16 crc kubenswrapper[4911]: I0606 11:04:16.665646 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_63f644ac-2580-4e7b-a723-f9787e2aacad/glance-httpd/0.log" Jun 06 11:04:16 crc kubenswrapper[4911]: I0606 11:04:16.701131 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_63f644ac-2580-4e7b-a723-f9787e2aacad/glance-log/0.log" Jun 06 11:04:16 crc kubenswrapper[4911]: I0606 11:04:16.873060 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_37d93350-7e25-445d-97e7-0095ebd1d997/glance-httpd/0.log" Jun 06 11:04:16 crc kubenswrapper[4911]: I0606 11:04:16.901709 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_37d93350-7e25-445d-97e7-0095ebd1d997/glance-log/0.log" Jun 06 11:04:17 crc kubenswrapper[4911]: I0606 11:04:17.035575 4911 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-hknjq_ffaebe67-a4d5-46d4-8bdc-d8bdddd58ff3/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:17 crc kubenswrapper[4911]: I0606 11:04:17.198610 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-z2ngz_e9d76de2-0187-44f1-b3e6-457dc1ca47e1/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:17 crc kubenswrapper[4911]: I0606 11:04:17.595017 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29153401-s7jg4_92c84921-c3c0-43d5-b87c-813ffe3fa478/keystone-cron/0.log" Jun 06 11:04:17 crc kubenswrapper[4911]: I0606 11:04:17.795113 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29153461-hwpvm_f1ee55fa-6ee5-4590-9f82-3421b3b803fe/keystone-cron/0.log" Jun 06 11:04:18 crc kubenswrapper[4911]: I0606 11:04:18.021574 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_1be3e7e6-2701-48b1-b26b-a154930ba2bb/kube-state-metrics/0.log" Jun 06 11:04:18 crc kubenswrapper[4911]: I0606 11:04:18.293839 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-csqcw_7bf0ed24-49d2-403e-afe0-25483f916433/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:18 crc kubenswrapper[4911]: I0606 11:04:18.462923 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-58846dd748-sgvz6_b23e60ab-054b-41e9-98c1-a3b2abc02b52/keystone-api/0.log" Jun 06 11:04:18 crc kubenswrapper[4911]: I0606 11:04:18.703246 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_5f46a595-e6b7-4e07-a817-8e37db292ace/manila-api/0.log" Jun 06 11:04:18 crc kubenswrapper[4911]: I0606 11:04:18.796549 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_5f46a595-e6b7-4e07-a817-8e37db292ace/manila-api-log/0.log" Jun 06 11:04:18 crc kubenswrapper[4911]: I0606 11:04:18.940284 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_16bc7add-06e1-4709-88f7-19e18988ee26/manila-scheduler/0.log" Jun 06 11:04:18 crc kubenswrapper[4911]: I0606 11:04:18.999043 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_16bc7add-06e1-4709-88f7-19e18988ee26/probe/0.log" Jun 06 11:04:19 crc kubenswrapper[4911]: I0606 11:04:19.170265 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_80c180ce-4fd8-40f4-b0bd-aa247612baa6/probe/0.log" Jun 06 11:04:19 crc kubenswrapper[4911]: I0606 11:04:19.231743 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_80c180ce-4fd8-40f4-b0bd-aa247612baa6/manila-share/0.log" Jun 06 11:04:20 crc kubenswrapper[4911]: I0606 11:04:20.028915 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-j68n9_eea86939-055a-4c55-a850-7cdd3c82e998/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:20 crc kubenswrapper[4911]: I0606 11:04:20.352820 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-b9f76f6d7-79lsr_ab86ede4-0d0e-415a-8dd3-87509499f46e/neutron-httpd/0.log" Jun 06 11:04:20 crc kubenswrapper[4911]: I0606 11:04:20.790134 4911 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-b9f76f6d7-79lsr_ab86ede4-0d0e-415a-8dd3-87509499f46e/neutron-api/0.log" Jun 06 11:04:21 crc kubenswrapper[4911]: I0606 11:04:21.702962 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_42ab8896-1ee7-4c4d-a713-5c28744fce0d/nova-cell0-conductor-conductor/0.log" Jun 06 11:04:21 crc kubenswrapper[4911]: I0606 11:04:21.955608 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:04:21 crc kubenswrapper[4911]: E0606 11:04:21.955847 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:04:22 crc kubenswrapper[4911]: I0606 11:04:22.617586 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_e8e14edf-bb25-4e67-87f2-d2a6e7f90b89/nova-cell1-conductor-conductor/0.log" Jun 06 11:04:22 crc kubenswrapper[4911]: I0606 11:04:22.913448 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b2ccf2c7-2f1c-48d9-812c-e148c3548e3a/nova-api-log/0.log" Jun 06 11:04:23 crc kubenswrapper[4911]: I0606 11:04:23.364730 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_adbe049a-9472-4e11-a99d-32969a196e78/nova-cell1-novncproxy-novncproxy/0.log" Jun 06 11:04:23 crc kubenswrapper[4911]: I0606 11:04:23.765353 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-w894s_8af0e0c9-e20f-479b-8622-49565f84eb2b/nova-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:23 crc kubenswrapper[4911]: I0606 11:04:23.785464 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_7fbe9d9e-a221-4ee6-9fd4-f5841ba38f20/memcached/0.log" Jun 06 11:04:24 crc kubenswrapper[4911]: I0606 11:04:24.076689 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4ac4797d-5355-4252-85b5-2c3be041d3ed/nova-metadata-log/0.log" Jun 06 11:04:24 crc kubenswrapper[4911]: I0606 11:04:24.083545 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_b2ccf2c7-2f1c-48d9-812c-e148c3548e3a/nova-api-api/0.log" Jun 06 11:04:24 crc kubenswrapper[4911]: I0606 11:04:24.463738 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_fc9662e0-ac5c-46b2-b608-945afee3f990/mysql-bootstrap/0.log" Jun 06 11:04:24 crc kubenswrapper[4911]: I0606 11:04:24.729485 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_fc9662e0-ac5c-46b2-b608-945afee3f990/mysql-bootstrap/0.log" Jun 06 11:04:24 crc kubenswrapper[4911]: I0606 11:04:24.742969 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_f6b94193-31cf-475f-82b9-9229341065d8/nova-scheduler-scheduler/0.log" Jun 06 11:04:24 crc kubenswrapper[4911]: I0606 11:04:24.823509 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_fc9662e0-ac5c-46b2-b608-945afee3f990/galera/0.log" Jun 06 11:04:25 crc kubenswrapper[4911]: I0606 11:04:25.111325 4911 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4a2cb894-6a4a-4a24-aedb-b3dabe082a4b/mysql-bootstrap/0.log" Jun 06 11:04:25 crc kubenswrapper[4911]: I0606 11:04:25.602970 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4a2cb894-6a4a-4a24-aedb-b3dabe082a4b/galera/0.log" Jun 06 11:04:25 crc kubenswrapper[4911]: I0606 11:04:25.645923 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4a2cb894-6a4a-4a24-aedb-b3dabe082a4b/mysql-bootstrap/0.log" Jun 06 11:04:25 crc kubenswrapper[4911]: I0606 11:04:25.848174 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_5bf7afff-c3ed-491d-a6e1-5c4ab20c8ac7/openstackclient/0.log" Jun 06 11:04:25 crc kubenswrapper[4911]: I0606 11:04:25.976513 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4dlv9_5a8407a6-611d-477c-8530-9c1728797994/ovn-controller/0.log" Jun 06 11:04:26 crc kubenswrapper[4911]: I0606 11:04:26.228900 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jjkvl_372d84e8-5035-48ca-9ee9-676bf64886c9/ovsdb-server-init/0.log" Jun 06 11:04:26 crc kubenswrapper[4911]: I0606 11:04:26.379930 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4ac4797d-5355-4252-85b5-2c3be041d3ed/nova-metadata-metadata/0.log" Jun 06 11:04:26 crc kubenswrapper[4911]: I0606 11:04:26.444036 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jjkvl_372d84e8-5035-48ca-9ee9-676bf64886c9/ovsdb-server-init/0.log" Jun 06 11:04:26 crc kubenswrapper[4911]: I0606 11:04:26.513570 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jjkvl_372d84e8-5035-48ca-9ee9-676bf64886c9/ovs-vswitchd/0.log" Jun 06 11:04:26 crc kubenswrapper[4911]: I0606 11:04:26.518420 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-jjkvl_372d84e8-5035-48ca-9ee9-676bf64886c9/ovsdb-server/0.log" Jun 06 11:04:26 crc kubenswrapper[4911]: I0606 11:04:26.740861 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qhv7q_2947ca03-3408-495a-961c-9d548088ebe4/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:26 crc kubenswrapper[4911]: I0606 11:04:26.791980 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-5f844cb4b9-bl729_d4d859d8-9e0d-4b6d-bbc6-c7ee95d0d2d9/ovn-northd/0.log" Jun 06 11:04:26 crc kubenswrapper[4911]: I0606 11:04:26.968655 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_ececc27b-f281-4e57-9c46-0cbe3a1ab2d7/ovsdbserver-nb/0.log" Jun 06 11:04:27 crc kubenswrapper[4911]: I0606 11:04:27.062784 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_42b9b63b-5112-46d4-98d8-01e75d78b84f/ovsdbserver-sb/0.log" Jun 06 11:04:27 crc kubenswrapper[4911]: I0606 11:04:27.561938 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8/setup-container/0.log" Jun 06 11:04:27 crc kubenswrapper[4911]: I0606 11:04:27.690127 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-78bb87b554-zfswk_7ecf7a83-e983-4766-a328-31fc235a59c0/placement-api/0.log" Jun 06 11:04:27 crc kubenswrapper[4911]: I0606 11:04:27.837275 4911 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_placement-78bb87b554-zfswk_7ecf7a83-e983-4766-a328-31fc235a59c0/placement-log/0.log" Jun 06 11:04:27 crc kubenswrapper[4911]: I0606 11:04:27.863242 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8/setup-container/0.log" Jun 06 11:04:27 crc kubenswrapper[4911]: I0606 11:04:27.944044 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_1cd9c143-7cf0-4055-81a5-2b2ff33ae6b8/rabbitmq/0.log" Jun 06 11:04:28 crc kubenswrapper[4911]: I0606 11:04:28.058178 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_733f579d-aebd-484e-a85c-6e25204f363e/setup-container/0.log" Jun 06 11:04:28 crc kubenswrapper[4911]: I0606 11:04:28.294355 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_733f579d-aebd-484e-a85c-6e25204f363e/rabbitmq/0.log" Jun 06 11:04:28 crc kubenswrapper[4911]: I0606 11:04:28.318589 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_733f579d-aebd-484e-a85c-6e25204f363e/setup-container/0.log" Jun 06 11:04:28 crc kubenswrapper[4911]: I0606 11:04:28.333001 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-b8p9q_ae48d921-6c61-4025-b5fb-1d23ffb85636/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:28 crc kubenswrapper[4911]: I0606 11:04:28.705241 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-6dgxq_aee7dc49-e783-4847-bdba-f4e885cd4977/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:28 crc kubenswrapper[4911]: I0606 11:04:28.807627 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-7llkr_29c902ea-8d52-4a94-a4a8-7479bd7ee53a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:28 crc kubenswrapper[4911]: I0606 11:04:28.952931 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-gbljq_4724e2d7-9618-4720-ba4c-31204929dbb4/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:29 crc kubenswrapper[4911]: I0606 11:04:29.194080 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-d2w9b_3942d3f6-c8b5-4092-8a22-8d3958955ab2/ssh-known-hosts-edpm-deployment/0.log" Jun 06 11:04:29 crc kubenswrapper[4911]: I0606 11:04:29.395402 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-59f44bc869-n7dl7_72e5a926-1c68-4e9b-9240-44c27d488e36/proxy-server/0.log" Jun 06 11:04:29 crc kubenswrapper[4911]: I0606 11:04:29.487710 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-7lbp2_6d1f75cb-fd34-4f91-9ade-650845917e96/swift-ring-rebalance/0.log" Jun 06 11:04:29 crc kubenswrapper[4911]: I0606 11:04:29.517642 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-59f44bc869-n7dl7_72e5a926-1c68-4e9b-9240-44c27d488e36/proxy-httpd/0.log" Jun 06 11:04:29 crc kubenswrapper[4911]: I0606 11:04:29.759587 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/account-reaper/0.log" Jun 06 11:04:29 crc kubenswrapper[4911]: I0606 11:04:29.788484 4911 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/account-auditor/0.log" Jun 06 11:04:29 crc kubenswrapper[4911]: I0606 11:04:29.914720 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/account-replicator/0.log" Jun 06 11:04:29 crc kubenswrapper[4911]: I0606 11:04:29.987215 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/container-auditor/0.log" Jun 06 11:04:29 crc kubenswrapper[4911]: I0606 11:04:29.989960 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/account-server/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.097250 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/container-replicator/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.199901 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/container-updater/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.232756 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/container-server/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.240701 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/object-auditor/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.346686 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/object-expirer/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.448491 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/object-replicator/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.464641 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/object-server/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.467906 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/object-updater/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.588073 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/rsync/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.680730 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bbc248fe-b133-4e7e-aad5-c29a3c215e6b/swift-recon-cron/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.713841 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-vcn2t_efc7deb8-a82d-4b0d-b3f9-200faf558eeb/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.986539 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-67f5j_3edd2d01-6fd8-44a5-affd-c2a0d9c114cb/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Jun 06 11:04:30 crc kubenswrapper[4911]: I0606 11:04:30.991996 4911 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_tempest-tests-tempest_9a6fdc52-110b-4573-92b5-57f19994ab56/tempest-tests-tempest-tests-runner/0.log" Jun 06 11:04:32 crc kubenswrapper[4911]: I0606 11:04:32.948229 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:04:33 crc kubenswrapper[4911]: I0606 11:04:33.588168 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"4a9223c433fb722a9b85a4df3f4ee18287263b354b671709a533ea9543f9f310"} Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.327509 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-hmqns"] Jun 06 11:05:02 crc kubenswrapper[4911]: E0606 11:05:02.328808 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89377167-f203-4492-8bc3-a8bdc8037ff0" containerName="container-00" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.328830 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="89377167-f203-4492-8bc3-a8bdc8037ff0" containerName="container-00" Jun 06 11:05:02 crc kubenswrapper[4911]: E0606 11:05:02.328854 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" containerName="extract-content" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.328861 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" containerName="extract-content" Jun 06 11:05:02 crc kubenswrapper[4911]: E0606 11:05:02.328902 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" containerName="extract-utilities" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.328911 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" containerName="extract-utilities" Jun 06 11:05:02 crc kubenswrapper[4911]: E0606 11:05:02.328926 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" containerName="registry-server" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.328933 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" containerName="registry-server" Jun 06 11:05:02 crc kubenswrapper[4911]: E0606 11:05:02.328947 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de8724e2-bbe9-420a-9a2c-87b45d80211a" containerName="registry-server" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.328953 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="de8724e2-bbe9-420a-9a2c-87b45d80211a" containerName="registry-server" Jun 06 11:05:02 crc kubenswrapper[4911]: E0606 11:05:02.328967 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de8724e2-bbe9-420a-9a2c-87b45d80211a" containerName="extract-content" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.328974 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="de8724e2-bbe9-420a-9a2c-87b45d80211a" containerName="extract-content" Jun 06 11:05:02 crc kubenswrapper[4911]: E0606 11:05:02.328985 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de8724e2-bbe9-420a-9a2c-87b45d80211a" containerName="extract-utilities" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.328995 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="de8724e2-bbe9-420a-9a2c-87b45d80211a" 
containerName="extract-utilities" Jun 06 11:05:02 crc kubenswrapper[4911]: E0606 11:05:02.329014 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" containerName="extract-utilities" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.329023 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" containerName="extract-utilities" Jun 06 11:05:02 crc kubenswrapper[4911]: E0606 11:05:02.329045 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" containerName="registry-server" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.329052 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" containerName="registry-server" Jun 06 11:05:02 crc kubenswrapper[4911]: E0606 11:05:02.329066 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" containerName="extract-content" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.329074 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" containerName="extract-content" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.329362 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccf3563f-0992-4859-b5c7-6c3a173d8d13" containerName="registry-server" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.329383 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb93acd8-cd7d-4a5e-bdc2-f55222a7a4ac" containerName="registry-server" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.335160 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="de8724e2-bbe9-420a-9a2c-87b45d80211a" containerName="registry-server" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.335219 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="89377167-f203-4492-8bc3-a8bdc8037ff0" containerName="container-00" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.336247 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-hmqns" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.367420 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/74f164bf-27fa-4d16-9208-a4d901619744-host\") pod \"crc-debug-hmqns\" (UID: \"74f164bf-27fa-4d16-9208-a4d901619744\") " pod="openstack/crc-debug-hmqns" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.367891 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs6tp\" (UniqueName: \"kubernetes.io/projected/74f164bf-27fa-4d16-9208-a4d901619744-kube-api-access-rs6tp\") pod \"crc-debug-hmqns\" (UID: \"74f164bf-27fa-4d16-9208-a4d901619744\") " pod="openstack/crc-debug-hmqns" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.469774 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs6tp\" (UniqueName: \"kubernetes.io/projected/74f164bf-27fa-4d16-9208-a4d901619744-kube-api-access-rs6tp\") pod \"crc-debug-hmqns\" (UID: \"74f164bf-27fa-4d16-9208-a4d901619744\") " pod="openstack/crc-debug-hmqns" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.469918 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/74f164bf-27fa-4d16-9208-a4d901619744-host\") pod \"crc-debug-hmqns\" (UID: \"74f164bf-27fa-4d16-9208-a4d901619744\") " pod="openstack/crc-debug-hmqns" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.470114 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/74f164bf-27fa-4d16-9208-a4d901619744-host\") pod \"crc-debug-hmqns\" (UID: \"74f164bf-27fa-4d16-9208-a4d901619744\") " pod="openstack/crc-debug-hmqns" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.508039 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs6tp\" (UniqueName: \"kubernetes.io/projected/74f164bf-27fa-4d16-9208-a4d901619744-kube-api-access-rs6tp\") pod \"crc-debug-hmqns\" (UID: \"74f164bf-27fa-4d16-9208-a4d901619744\") " pod="openstack/crc-debug-hmqns" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.656609 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-hmqns" Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.943647 4911 generic.go:334] "Generic (PLEG): container finished" podID="ad147ecb-393b-4645-b62e-6337b7cdf593" containerID="88bf0866b71b196c17e5f681a200bff5842a537d51ed00f90bcb4746a45a886b" exitCode=0 Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.943767 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" event={"ID":"ad147ecb-393b-4645-b62e-6337b7cdf593","Type":"ContainerDied","Data":"88bf0866b71b196c17e5f681a200bff5842a537d51ed00f90bcb4746a45a886b"} Jun 06 11:05:02 crc kubenswrapper[4911]: I0606 11:05:02.947107 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-hmqns" event={"ID":"74f164bf-27fa-4d16-9208-a4d901619744","Type":"ContainerStarted","Data":"3ad90425ae3ec1383a42fb18fd36f240d2f3b496a399223ce5d2975d3141d53d"} Jun 06 11:05:03 crc kubenswrapper[4911]: I0606 11:05:03.966930 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-hmqns" event={"ID":"74f164bf-27fa-4d16-9208-a4d901619744","Type":"ContainerStarted","Data":"bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0"} Jun 06 11:05:03 crc kubenswrapper[4911]: I0606 11:05:03.976866 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-hmqns" podStartSLOduration=1.976836349 podStartE2EDuration="1.976836349s" podCreationTimestamp="2025-06-06 11:05:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:05:03.97574199 +0000 UTC m=+6715.251167543" watchObservedRunningTime="2025-06-06 11:05:03.976836349 +0000 UTC m=+6715.252261892" Jun 06 11:05:04 crc kubenswrapper[4911]: I0606 11:05:04.114713 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" Jun 06 11:05:04 crc kubenswrapper[4911]: I0606 11:05:04.152055 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zrwfs/crc-debug-q8wz5"] Jun 06 11:05:04 crc kubenswrapper[4911]: I0606 11:05:04.161593 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zrwfs/crc-debug-q8wz5"] Jun 06 11:05:04 crc kubenswrapper[4911]: I0606 11:05:04.316864 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ad147ecb-393b-4645-b62e-6337b7cdf593-host\") pod \"ad147ecb-393b-4645-b62e-6337b7cdf593\" (UID: \"ad147ecb-393b-4645-b62e-6337b7cdf593\") " Jun 06 11:05:04 crc kubenswrapper[4911]: I0606 11:05:04.316994 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt9ql\" (UniqueName: \"kubernetes.io/projected/ad147ecb-393b-4645-b62e-6337b7cdf593-kube-api-access-vt9ql\") pod \"ad147ecb-393b-4645-b62e-6337b7cdf593\" (UID: \"ad147ecb-393b-4645-b62e-6337b7cdf593\") " Jun 06 11:05:04 crc kubenswrapper[4911]: I0606 11:05:04.317105 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad147ecb-393b-4645-b62e-6337b7cdf593-host" (OuterVolumeSpecName: "host") pod "ad147ecb-393b-4645-b62e-6337b7cdf593" (UID: "ad147ecb-393b-4645-b62e-6337b7cdf593"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:05:04 crc kubenswrapper[4911]: I0606 11:05:04.317616 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ad147ecb-393b-4645-b62e-6337b7cdf593-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:05:04 crc kubenswrapper[4911]: I0606 11:05:04.327484 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad147ecb-393b-4645-b62e-6337b7cdf593-kube-api-access-vt9ql" (OuterVolumeSpecName: "kube-api-access-vt9ql") pod "ad147ecb-393b-4645-b62e-6337b7cdf593" (UID: "ad147ecb-393b-4645-b62e-6337b7cdf593"). InnerVolumeSpecName "kube-api-access-vt9ql". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:05:04 crc kubenswrapper[4911]: I0606 11:05:04.420439 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt9ql\" (UniqueName: \"kubernetes.io/projected/ad147ecb-393b-4645-b62e-6337b7cdf593-kube-api-access-vt9ql\") on node \"crc\" DevicePath \"\"" Jun 06 11:05:04 crc kubenswrapper[4911]: I0606 11:05:04.970626 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-q8wz5" Jun 06 11:05:04 crc kubenswrapper[4911]: I0606 11:05:04.971489 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="301808d5f728c08427bf6edd987e3664dc8e987446f493b870a23e05bc28f970" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.411868 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zrwfs/crc-debug-vtzjw"] Jun 06 11:05:05 crc kubenswrapper[4911]: E0606 11:05:05.413292 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad147ecb-393b-4645-b62e-6337b7cdf593" containerName="container-00" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.413317 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad147ecb-393b-4645-b62e-6337b7cdf593" containerName="container-00" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.413725 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad147ecb-393b-4645-b62e-6337b7cdf593" containerName="container-00" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.414930 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.449561 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-host\") pod \"crc-debug-vtzjw\" (UID: \"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57\") " pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.449942 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqxxd\" (UniqueName: \"kubernetes.io/projected/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-kube-api-access-cqxxd\") pod \"crc-debug-vtzjw\" (UID: \"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57\") " pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.553510 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-host\") pod \"crc-debug-vtzjw\" (UID: \"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57\") " pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.553585 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqxxd\" (UniqueName: \"kubernetes.io/projected/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-kube-api-access-cqxxd\") pod \"crc-debug-vtzjw\" (UID: \"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57\") " pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.553715 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-host\") pod \"crc-debug-vtzjw\" (UID: \"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57\") " pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.577053 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqxxd\" (UniqueName: \"kubernetes.io/projected/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-kube-api-access-cqxxd\") pod \"crc-debug-vtzjw\" (UID: \"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57\") " pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.752432 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" Jun 06 11:05:05 crc kubenswrapper[4911]: W0606 11:05:05.802406 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1ae8efe_d8b0_41fd_b7f8_82e12d12aa57.slice/crio-66c8d3db969de00490b448a32d1eed10b0c6e2213af82762f84d07c1cd962397 WatchSource:0}: Error finding container 66c8d3db969de00490b448a32d1eed10b0c6e2213af82762f84d07c1cd962397: Status 404 returned error can't find the container with id 66c8d3db969de00490b448a32d1eed10b0c6e2213af82762f84d07c1cd962397 Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.959762 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad147ecb-393b-4645-b62e-6337b7cdf593" path="/var/lib/kubelet/pods/ad147ecb-393b-4645-b62e-6337b7cdf593/volumes" Jun 06 11:05:05 crc kubenswrapper[4911]: I0606 11:05:05.980037 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" event={"ID":"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57","Type":"ContainerStarted","Data":"66c8d3db969de00490b448a32d1eed10b0c6e2213af82762f84d07c1cd962397"} Jun 06 11:05:06 crc kubenswrapper[4911]: I0606 11:05:06.991142 4911 generic.go:334] "Generic (PLEG): container finished" podID="a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57" containerID="d16059bab9796ad749188a1b3cefb29475c66c3681cb856c19d7b65b670468c3" exitCode=0 Jun 06 11:05:06 crc kubenswrapper[4911]: I0606 11:05:06.991289 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" event={"ID":"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57","Type":"ContainerDied","Data":"d16059bab9796ad749188a1b3cefb29475c66c3681cb856c19d7b65b670468c3"} Jun 06 11:05:08 crc kubenswrapper[4911]: I0606 11:05:08.098240 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" Jun 06 11:05:08 crc kubenswrapper[4911]: I0606 11:05:08.115974 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqxxd\" (UniqueName: \"kubernetes.io/projected/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-kube-api-access-cqxxd\") pod \"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57\" (UID: \"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57\") " Jun 06 11:05:08 crc kubenswrapper[4911]: I0606 11:05:08.128345 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-kube-api-access-cqxxd" (OuterVolumeSpecName: "kube-api-access-cqxxd") pod "a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57" (UID: "a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57"). InnerVolumeSpecName "kube-api-access-cqxxd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:05:08 crc kubenswrapper[4911]: I0606 11:05:08.218414 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-host\") pod \"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57\" (UID: \"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57\") " Jun 06 11:05:08 crc kubenswrapper[4911]: I0606 11:05:08.218508 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-host" (OuterVolumeSpecName: "host") pod "a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57" (UID: "a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:05:08 crc kubenswrapper[4911]: I0606 11:05:08.219980 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqxxd\" (UniqueName: \"kubernetes.io/projected/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-kube-api-access-cqxxd\") on node \"crc\" DevicePath \"\"" Jun 06 11:05:08 crc kubenswrapper[4911]: I0606 11:05:08.220008 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:05:09 crc kubenswrapper[4911]: I0606 11:05:09.020671 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" event={"ID":"a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57","Type":"ContainerDied","Data":"66c8d3db969de00490b448a32d1eed10b0c6e2213af82762f84d07c1cd962397"} Jun 06 11:05:09 crc kubenswrapper[4911]: I0606 11:05:09.022351 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66c8d3db969de00490b448a32d1eed10b0c6e2213af82762f84d07c1cd962397" Jun 06 11:05:09 crc kubenswrapper[4911]: I0606 11:05:09.022512 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-vtzjw" Jun 06 11:05:13 crc kubenswrapper[4911]: I0606 11:05:13.364620 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-hmqns"] Jun 06 11:05:13 crc kubenswrapper[4911]: I0606 11:05:13.365741 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-hmqns" podUID="74f164bf-27fa-4d16-9208-a4d901619744" containerName="container-00" containerID="cri-o://bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0" gracePeriod=2 Jun 06 11:05:13 crc kubenswrapper[4911]: I0606 11:05:13.375497 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-hmqns"] Jun 06 11:05:13 crc kubenswrapper[4911]: I0606 11:05:13.489594 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-hmqns" Jun 06 11:05:13 crc kubenswrapper[4911]: I0606 11:05:13.556884 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/74f164bf-27fa-4d16-9208-a4d901619744-host\") pod \"74f164bf-27fa-4d16-9208-a4d901619744\" (UID: \"74f164bf-27fa-4d16-9208-a4d901619744\") " Jun 06 11:05:13 crc kubenswrapper[4911]: I0606 11:05:13.556937 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs6tp\" (UniqueName: \"kubernetes.io/projected/74f164bf-27fa-4d16-9208-a4d901619744-kube-api-access-rs6tp\") pod \"74f164bf-27fa-4d16-9208-a4d901619744\" (UID: \"74f164bf-27fa-4d16-9208-a4d901619744\") " Jun 06 11:05:13 crc kubenswrapper[4911]: I0606 11:05:13.557000 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/74f164bf-27fa-4d16-9208-a4d901619744-host" (OuterVolumeSpecName: "host") pod "74f164bf-27fa-4d16-9208-a4d901619744" (UID: "74f164bf-27fa-4d16-9208-a4d901619744"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:05:13 crc kubenswrapper[4911]: I0606 11:05:13.557473 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/74f164bf-27fa-4d16-9208-a4d901619744-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:05:13 crc kubenswrapper[4911]: I0606 11:05:13.575122 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74f164bf-27fa-4d16-9208-a4d901619744-kube-api-access-rs6tp" (OuterVolumeSpecName: "kube-api-access-rs6tp") pod "74f164bf-27fa-4d16-9208-a4d901619744" (UID: "74f164bf-27fa-4d16-9208-a4d901619744"). InnerVolumeSpecName "kube-api-access-rs6tp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:05:13 crc kubenswrapper[4911]: I0606 11:05:13.659434 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs6tp\" (UniqueName: \"kubernetes.io/projected/74f164bf-27fa-4d16-9208-a4d901619744-kube-api-access-rs6tp\") on node \"crc\" DevicePath \"\"" Jun 06 11:05:13 crc kubenswrapper[4911]: I0606 11:05:13.961327 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74f164bf-27fa-4d16-9208-a4d901619744" path="/var/lib/kubelet/pods/74f164bf-27fa-4d16-9208-a4d901619744/volumes" Jun 06 11:05:14 crc kubenswrapper[4911]: I0606 11:05:14.086819 4911 generic.go:334] "Generic (PLEG): container finished" podID="74f164bf-27fa-4d16-9208-a4d901619744" containerID="bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0" exitCode=0 Jun 06 11:05:14 crc kubenswrapper[4911]: I0606 11:05:14.086897 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-hmqns" Jun 06 11:05:14 crc kubenswrapper[4911]: I0606 11:05:14.086993 4911 scope.go:117] "RemoveContainer" containerID="bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0" Jun 06 11:05:14 crc kubenswrapper[4911]: I0606 11:05:14.118599 4911 scope.go:117] "RemoveContainer" containerID="bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0" Jun 06 11:05:14 crc kubenswrapper[4911]: E0606 11:05:14.120438 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0\": container with ID starting with bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0 not found: ID does not exist" containerID="bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0" Jun 06 11:05:14 crc kubenswrapper[4911]: I0606 11:05:14.120520 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0"} err="failed to get container status \"bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0\": rpc error: code = NotFound desc = could not find container \"bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0\": container with ID starting with bd50bf804edf7a37194f373bf70d542eeea813ee8c877ba0ca6aa648742879c0 not found: ID does not exist" Jun 06 11:05:16 crc kubenswrapper[4911]: I0606 11:05:16.888899 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zrwfs/crc-debug-vtzjw"] Jun 06 11:05:16 crc kubenswrapper[4911]: I0606 11:05:16.897000 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zrwfs/crc-debug-vtzjw"] Jun 06 11:05:17 crc kubenswrapper[4911]: I0606 11:05:17.993979 4911 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57" path="/var/lib/kubelet/pods/a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57/volumes" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.145397 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zrwfs/crc-debug-zq6hv"] Jun 06 11:05:18 crc kubenswrapper[4911]: E0606 11:05:18.145889 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57" containerName="container-00" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.145906 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57" containerName="container-00" Jun 06 11:05:18 crc kubenswrapper[4911]: E0606 11:05:18.145918 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74f164bf-27fa-4d16-9208-a4d901619744" containerName="container-00" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.145926 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="74f164bf-27fa-4d16-9208-a4d901619744" containerName="container-00" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.146152 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="74f164bf-27fa-4d16-9208-a4d901619744" containerName="container-00" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.146175 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1ae8efe-d8b0-41fd-b7f8-82e12d12aa57" containerName="container-00" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.146873 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.278045 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6wmw\" (UniqueName: \"kubernetes.io/projected/672504fb-1d3e-45fe-b896-3a80f06d97f0-kube-api-access-v6wmw\") pod \"crc-debug-zq6hv\" (UID: \"672504fb-1d3e-45fe-b896-3a80f06d97f0\") " pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.278109 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/672504fb-1d3e-45fe-b896-3a80f06d97f0-host\") pod \"crc-debug-zq6hv\" (UID: \"672504fb-1d3e-45fe-b896-3a80f06d97f0\") " pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.380881 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6wmw\" (UniqueName: \"kubernetes.io/projected/672504fb-1d3e-45fe-b896-3a80f06d97f0-kube-api-access-v6wmw\") pod \"crc-debug-zq6hv\" (UID: \"672504fb-1d3e-45fe-b896-3a80f06d97f0\") " pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.381435 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/672504fb-1d3e-45fe-b896-3a80f06d97f0-host\") pod \"crc-debug-zq6hv\" (UID: \"672504fb-1d3e-45fe-b896-3a80f06d97f0\") " pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.381689 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/672504fb-1d3e-45fe-b896-3a80f06d97f0-host\") pod \"crc-debug-zq6hv\" (UID: 
\"672504fb-1d3e-45fe-b896-3a80f06d97f0\") " pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.417882 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6wmw\" (UniqueName: \"kubernetes.io/projected/672504fb-1d3e-45fe-b896-3a80f06d97f0-kube-api-access-v6wmw\") pod \"crc-debug-zq6hv\" (UID: \"672504fb-1d3e-45fe-b896-3a80f06d97f0\") " pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" Jun 06 11:05:18 crc kubenswrapper[4911]: I0606 11:05:18.473911 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" Jun 06 11:05:19 crc kubenswrapper[4911]: I0606 11:05:19.138211 4911 generic.go:334] "Generic (PLEG): container finished" podID="672504fb-1d3e-45fe-b896-3a80f06d97f0" containerID="59bafc177f162854358af1e45acfb214d74b59e0249ec950d9f9413ebe6c281a" exitCode=0 Jun 06 11:05:19 crc kubenswrapper[4911]: I0606 11:05:19.138320 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" event={"ID":"672504fb-1d3e-45fe-b896-3a80f06d97f0","Type":"ContainerDied","Data":"59bafc177f162854358af1e45acfb214d74b59e0249ec950d9f9413ebe6c281a"} Jun 06 11:05:19 crc kubenswrapper[4911]: I0606 11:05:19.138738 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" event={"ID":"672504fb-1d3e-45fe-b896-3a80f06d97f0","Type":"ContainerStarted","Data":"b608d016b1f575c1fddddd7b8659e39b9815af3f461c39de35f32802cfb05b04"} Jun 06 11:05:19 crc kubenswrapper[4911]: I0606 11:05:19.185371 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zrwfs/crc-debug-zq6hv"] Jun 06 11:05:19 crc kubenswrapper[4911]: I0606 11:05:19.200385 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zrwfs/crc-debug-zq6hv"] Jun 06 11:05:20 crc kubenswrapper[4911]: I0606 11:05:20.276000 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" Jun 06 11:05:20 crc kubenswrapper[4911]: I0606 11:05:20.430337 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/672504fb-1d3e-45fe-b896-3a80f06d97f0-host\") pod \"672504fb-1d3e-45fe-b896-3a80f06d97f0\" (UID: \"672504fb-1d3e-45fe-b896-3a80f06d97f0\") " Jun 06 11:05:20 crc kubenswrapper[4911]: I0606 11:05:20.430477 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/672504fb-1d3e-45fe-b896-3a80f06d97f0-host" (OuterVolumeSpecName: "host") pod "672504fb-1d3e-45fe-b896-3a80f06d97f0" (UID: "672504fb-1d3e-45fe-b896-3a80f06d97f0"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:05:20 crc kubenswrapper[4911]: I0606 11:05:20.430625 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6wmw\" (UniqueName: \"kubernetes.io/projected/672504fb-1d3e-45fe-b896-3a80f06d97f0-kube-api-access-v6wmw\") pod \"672504fb-1d3e-45fe-b896-3a80f06d97f0\" (UID: \"672504fb-1d3e-45fe-b896-3a80f06d97f0\") " Jun 06 11:05:20 crc kubenswrapper[4911]: I0606 11:05:20.431461 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/672504fb-1d3e-45fe-b896-3a80f06d97f0-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:05:20 crc kubenswrapper[4911]: I0606 11:05:20.437588 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/672504fb-1d3e-45fe-b896-3a80f06d97f0-kube-api-access-v6wmw" (OuterVolumeSpecName: "kube-api-access-v6wmw") pod "672504fb-1d3e-45fe-b896-3a80f06d97f0" (UID: "672504fb-1d3e-45fe-b896-3a80f06d97f0"). InnerVolumeSpecName "kube-api-access-v6wmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:05:20 crc kubenswrapper[4911]: I0606 11:05:20.534479 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6wmw\" (UniqueName: \"kubernetes.io/projected/672504fb-1d3e-45fe-b896-3a80f06d97f0-kube-api-access-v6wmw\") on node \"crc\" DevicePath \"\"" Jun 06 11:05:21 crc kubenswrapper[4911]: I0606 11:05:21.165397 4911 scope.go:117] "RemoveContainer" containerID="59bafc177f162854358af1e45acfb214d74b59e0249ec950d9f9413ebe6c281a" Jun 06 11:05:21 crc kubenswrapper[4911]: I0606 11:05:21.165485 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zrwfs/crc-debug-zq6hv" Jun 06 11:05:21 crc kubenswrapper[4911]: I0606 11:05:21.318017 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw_8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5/util/0.log" Jun 06 11:05:21 crc kubenswrapper[4911]: E0606 11:05:21.415358 4911 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod672504fb_1d3e_45fe_b896_3a80f06d97f0.slice/crio-b608d016b1f575c1fddddd7b8659e39b9815af3f461c39de35f32802cfb05b04\": RecentStats: unable to find data in memory cache]" Jun 06 11:05:21 crc kubenswrapper[4911]: I0606 11:05:21.552579 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw_8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5/pull/0.log" Jun 06 11:05:21 crc kubenswrapper[4911]: I0606 11:05:21.564274 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw_8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5/util/0.log" Jun 06 11:05:21 crc kubenswrapper[4911]: I0606 11:05:21.571336 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw_8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5/pull/0.log" Jun 06 11:05:21 crc kubenswrapper[4911]: I0606 11:05:21.787326 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw_8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5/pull/0.log" Jun 06 11:05:21 crc kubenswrapper[4911]: I0606 
11:05:21.789068 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw_8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5/util/0.log" Jun 06 11:05:21 crc kubenswrapper[4911]: I0606 11:05:21.835247 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3c2cf75bdf5a3769a049ee81bdb98b53ae4eeb6deec27c8b3ca62c9d5dljdlw_8fa3ccb0-44a5-4281-9c85-a2f59b2f18d5/extract/0.log" Jun 06 11:05:21 crc kubenswrapper[4911]: I0606 11:05:21.960372 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="672504fb-1d3e-45fe-b896-3a80f06d97f0" path="/var/lib/kubelet/pods/672504fb-1d3e-45fe-b896-3a80f06d97f0/volumes" Jun 06 11:05:22 crc kubenswrapper[4911]: I0606 11:05:22.021171 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-9889b4756-xv9ps_c5040ae4-3ac1-4bf6-a982-4dd494402e9f/kube-rbac-proxy/0.log" Jun 06 11:05:22 crc kubenswrapper[4911]: I0606 11:05:22.091671 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-9889b4756-xv9ps_c5040ae4-3ac1-4bf6-a982-4dd494402e9f/manager/0.log" Jun 06 11:05:22 crc kubenswrapper[4911]: I0606 11:05:22.142185 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-57f4dc9749-jnxvw_02910366-cde2-4ad1-a276-cb4fe7c3e4c1/kube-rbac-proxy/0.log" Jun 06 11:05:22 crc kubenswrapper[4911]: I0606 11:05:22.308486 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-57f4dc9749-jnxvw_02910366-cde2-4ad1-a276-cb4fe7c3e4c1/manager/0.log" Jun 06 11:05:22 crc kubenswrapper[4911]: I0606 11:05:22.419442 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b554678df-g2vch_2ac3f341-0925-45e2-a1fe-f356cde13a14/manager/0.log" Jun 06 11:05:22 crc kubenswrapper[4911]: I0606 11:05:22.428064 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b554678df-g2vch_2ac3f341-0925-45e2-a1fe-f356cde13a14/kube-rbac-proxy/0.log" Jun 06 11:05:22 crc kubenswrapper[4911]: I0606 11:05:22.621741 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-97b97479c-j5r77_0631c55e-f521-4524-9881-14a20a5b280d/kube-rbac-proxy/0.log" Jun 06 11:05:22 crc kubenswrapper[4911]: I0606 11:05:22.738451 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-97b97479c-j5r77_0631c55e-f521-4524-9881-14a20a5b280d/manager/0.log" Jun 06 11:05:22 crc kubenswrapper[4911]: I0606 11:05:22.825741 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5486f4b54f-klhdq_b6cd86f9-52b4-430f-b1d6-105fe436aff6/kube-rbac-proxy/0.log" Jun 06 11:05:22 crc kubenswrapper[4911]: I0606 11:05:22.950752 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5486f4b54f-klhdq_b6cd86f9-52b4-430f-b1d6-105fe436aff6/manager/0.log" Jun 06 11:05:22 crc kubenswrapper[4911]: I0606 11:05:22.954765 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7777cf768b-l4tg9_3431e651-edaf-4b0b-b6a1-f56fbae01047/kube-rbac-proxy/0.log" Jun 06 11:05:23 
crc kubenswrapper[4911]: I0606 11:05:23.070168 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7777cf768b-l4tg9_3431e651-edaf-4b0b-b6a1-f56fbae01047/manager/0.log" Jun 06 11:05:23 crc kubenswrapper[4911]: I0606 11:05:23.182084 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5b4ccb8c4-t77qw_229f54b7-db5c-48f8-9188-8ba38df574b5/kube-rbac-proxy/0.log" Jun 06 11:05:23 crc kubenswrapper[4911]: I0606 11:05:23.427422 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5b4ccb8c4-t77qw_229f54b7-db5c-48f8-9188-8ba38df574b5/manager/0.log" Jun 06 11:05:23 crc kubenswrapper[4911]: I0606 11:05:23.439576 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-68f4bbb747-rsbj6_639b9190-f625-453c-8ab5-d48b2140f801/kube-rbac-proxy/0.log" Jun 06 11:05:23 crc kubenswrapper[4911]: I0606 11:05:23.465773 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-68f4bbb747-rsbj6_639b9190-f625-453c-8ab5-d48b2140f801/manager/0.log" Jun 06 11:05:23 crc kubenswrapper[4911]: I0606 11:05:23.613968 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5ccbd96f89-fw9ps_38945f0e-4d6b-44ba-b644-cdb391508f47/kube-rbac-proxy/0.log" Jun 06 11:05:23 crc kubenswrapper[4911]: I0606 11:05:23.730211 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5ccbd96f89-fw9ps_38945f0e-4d6b-44ba-b644-cdb391508f47/manager/0.log" Jun 06 11:05:23 crc kubenswrapper[4911]: I0606 11:05:23.846009 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-75b8755b74-8djvm_8d372995-214b-497b-807e-56a813866d07/kube-rbac-proxy/0.log" Jun 06 11:05:23 crc kubenswrapper[4911]: I0606 11:05:23.924634 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-75b8755b74-8djvm_8d372995-214b-497b-807e-56a813866d07/manager/0.log" Jun 06 11:05:24 crc kubenswrapper[4911]: I0606 11:05:24.039765 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-7d4bbc7f54-rgdz4_33d5bd3b-0223-4a9b-93a7-85004ae5f40a/kube-rbac-proxy/0.log" Jun 06 11:05:24 crc kubenswrapper[4911]: I0606 11:05:24.143853 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-7d4bbc7f54-rgdz4_33d5bd3b-0223-4a9b-93a7-85004ae5f40a/manager/0.log" Jun 06 11:05:24 crc kubenswrapper[4911]: I0606 11:05:24.223776 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5df6744645-slsxs_6ae578bf-06e2-4fcd-b272-84216e832cb2/kube-rbac-proxy/0.log" Jun 06 11:05:24 crc kubenswrapper[4911]: I0606 11:05:24.295641 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5df6744645-slsxs_6ae578bf-06e2-4fcd-b272-84216e832cb2/manager/0.log" Jun 06 11:05:24 crc kubenswrapper[4911]: I0606 11:05:24.426131 4911 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_nova-operator-controller-manager-664db87fd8-hfzdg_06051762-e014-4c42-9e66-e34da6b618c8/kube-rbac-proxy/0.log" Jun 06 11:05:24 crc kubenswrapper[4911]: I0606 11:05:24.497772 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-664db87fd8-hfzdg_06051762-e014-4c42-9e66-e34da6b618c8/manager/0.log" Jun 06 11:05:24 crc kubenswrapper[4911]: I0606 11:05:24.655463 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-857f9d6b88-ggz4c_39ddd5bc-de82-4e82-9744-b4d5a64e052f/kube-rbac-proxy/0.log" Jun 06 11:05:24 crc kubenswrapper[4911]: I0606 11:05:24.730264 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-857f9d6b88-ggz4c_39ddd5bc-de82-4e82-9744-b4d5a64e052f/manager/0.log" Jun 06 11:05:24 crc kubenswrapper[4911]: I0606 11:05:24.954697 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn_2b8c5692-2750-401d-bee2-4717f71fc6df/kube-rbac-proxy/0.log" Jun 06 11:05:24 crc kubenswrapper[4911]: I0606 11:05:24.962541 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7dfb6cb54-mgrrn_2b8c5692-2750-401d-bee2-4717f71fc6df/manager/0.log" Jun 06 11:05:25 crc kubenswrapper[4911]: I0606 11:05:25.047536 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-764c8d9cbc-rtgz2_24fd9a67-5d42-4633-bc61-19f5975b18ff/kube-rbac-proxy/0.log" Jun 06 11:05:25 crc kubenswrapper[4911]: I0606 11:05:25.158528 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-69c87d8fd5-b85qh_0003c307-6b38-4d34-a39c-8f1792405537/kube-rbac-proxy/0.log" Jun 06 11:05:25 crc kubenswrapper[4911]: I0606 11:05:25.381884 4911 scope.go:117] "RemoveContainer" containerID="b2a2daaf8026c1866d28f0fd7f57ef26565f27d5664bfbb37185b47b91e316b5" Jun 06 11:05:25 crc kubenswrapper[4911]: I0606 11:05:25.422024 4911 scope.go:117] "RemoveContainer" containerID="1b83afc20bf56d36ec50731ba539124f4d3fe591233624624a9c43948f20e167" Jun 06 11:05:25 crc kubenswrapper[4911]: I0606 11:05:25.450542 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-lhvml_f0aa4dae-1e09-43b9-8b7e-380ab59d1d2e/registry-server/0.log" Jun 06 11:05:25 crc kubenswrapper[4911]: I0606 11:05:25.460172 4911 scope.go:117] "RemoveContainer" containerID="6cb9df1d6e68c19fdbba9b8733815e36643a159128d0d2e0f21137c425d2c8f3" Jun 06 11:05:25 crc kubenswrapper[4911]: I0606 11:05:25.527507 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-69c87d8fd5-b85qh_0003c307-6b38-4d34-a39c-8f1792405537/operator/0.log" Jun 06 11:05:25 crc kubenswrapper[4911]: I0606 11:05:25.684508 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9f78645d5-6p5gr_dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944/kube-rbac-proxy/0.log" Jun 06 11:05:25 crc kubenswrapper[4911]: I0606 11:05:25.786310 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9f78645d5-6p5gr_dc5fcb2e-a7ac-46ae-a2ff-d6769b96d944/manager/0.log" Jun 06 11:05:25 crc kubenswrapper[4911]: I0606 
11:05:25.894548 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-58f798889d-4xw87_62a5da69-9938-40e9-944a-889b6a7fcc04/kube-rbac-proxy/0.log" Jun 06 11:05:26 crc kubenswrapper[4911]: I0606 11:05:26.101457 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-58f798889d-4xw87_62a5da69-9938-40e9-944a-889b6a7fcc04/manager/0.log" Jun 06 11:05:26 crc kubenswrapper[4911]: I0606 11:05:26.239975 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-67ff8584d-djvws_08567c0a-fcf6-4ed2-8477-bccd389f2a6d/operator/0.log" Jun 06 11:05:26 crc kubenswrapper[4911]: I0606 11:05:26.408509 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-7779c57cf7-s7shc_0243dd73-76b2-4168-8106-14676305be39/kube-rbac-proxy/0.log" Jun 06 11:05:26 crc kubenswrapper[4911]: I0606 11:05:26.465824 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-7779c57cf7-s7shc_0243dd73-76b2-4168-8106-14676305be39/manager/0.log" Jun 06 11:05:26 crc kubenswrapper[4911]: I0606 11:05:26.498516 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-764c8d9cbc-rtgz2_24fd9a67-5d42-4633-bc61-19f5975b18ff/manager/0.log" Jun 06 11:05:26 crc kubenswrapper[4911]: I0606 11:05:26.522067 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-884d667-t6d6w_4ed91cbd-66c2-4144-8770-c1495382976a/kube-rbac-proxy/0.log" Jun 06 11:05:26 crc kubenswrapper[4911]: I0606 11:05:26.733052 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-884d667-t6d6w_4ed91cbd-66c2-4144-8770-c1495382976a/manager/0.log" Jun 06 11:05:26 crc kubenswrapper[4911]: I0606 11:05:26.762009 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6db7bffb67-stlv6_5bddb4bd-da2f-405e-a72e-3b89c3526010/kube-rbac-proxy/0.log" Jun 06 11:05:26 crc kubenswrapper[4911]: I0606 11:05:26.869689 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6db7bffb67-stlv6_5bddb4bd-da2f-405e-a72e-3b89c3526010/manager/0.log" Jun 06 11:05:46 crc kubenswrapper[4911]: I0606 11:05:46.075184 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-h4q7d_acf3bc54-04c0-416f-bf4a-541244ac0074/control-plane-machine-set-operator/0.log" Jun 06 11:05:46 crc kubenswrapper[4911]: I0606 11:05:46.297873 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-r56ns_22e821a6-f095-4c6e-ac9e-8484e31bd21e/kube-rbac-proxy/0.log" Jun 06 11:05:46 crc kubenswrapper[4911]: I0606 11:05:46.355430 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-r56ns_22e821a6-f095-4c6e-ac9e-8484e31bd21e/machine-api-operator/0.log" Jun 06 11:06:01 crc kubenswrapper[4911]: I0606 11:06:01.075693 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-zqwfb_75f83d38-b516-4f79-b071-e57e93d6d35b/cert-manager-controller/0.log" Jun 06 11:06:01 crc kubenswrapper[4911]: 
I0606 11:06:01.304557 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-v4mqj_17fadf3c-2d8d-4b8d-93df-8fe99670ffde/cert-manager-cainjector/0.log" Jun 06 11:06:01 crc kubenswrapper[4911]: I0606 11:06:01.385367 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-8mhxf_cdc87445-bf03-4198-83c5-ff423ab48e27/cert-manager-webhook/0.log" Jun 06 11:06:01 crc kubenswrapper[4911]: I0606 11:06:01.737604 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-96l4x"] Jun 06 11:06:01 crc kubenswrapper[4911]: E0606 11:06:01.738402 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="672504fb-1d3e-45fe-b896-3a80f06d97f0" containerName="container-00" Jun 06 11:06:01 crc kubenswrapper[4911]: I0606 11:06:01.738431 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="672504fb-1d3e-45fe-b896-3a80f06d97f0" containerName="container-00" Jun 06 11:06:01 crc kubenswrapper[4911]: I0606 11:06:01.738695 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="672504fb-1d3e-45fe-b896-3a80f06d97f0" containerName="container-00" Jun 06 11:06:01 crc kubenswrapper[4911]: I0606 11:06:01.739798 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-96l4x" Jun 06 11:06:01 crc kubenswrapper[4911]: I0606 11:06:01.899247 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pn9p\" (UniqueName: \"kubernetes.io/projected/c51d505c-d11c-4820-9e7e-59c6f77ebc38-kube-api-access-7pn9p\") pod \"crc-debug-96l4x\" (UID: \"c51d505c-d11c-4820-9e7e-59c6f77ebc38\") " pod="openstack/crc-debug-96l4x" Jun 06 11:06:01 crc kubenswrapper[4911]: I0606 11:06:01.899793 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c51d505c-d11c-4820-9e7e-59c6f77ebc38-host\") pod \"crc-debug-96l4x\" (UID: \"c51d505c-d11c-4820-9e7e-59c6f77ebc38\") " pod="openstack/crc-debug-96l4x" Jun 06 11:06:02 crc kubenswrapper[4911]: I0606 11:06:02.001954 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pn9p\" (UniqueName: \"kubernetes.io/projected/c51d505c-d11c-4820-9e7e-59c6f77ebc38-kube-api-access-7pn9p\") pod \"crc-debug-96l4x\" (UID: \"c51d505c-d11c-4820-9e7e-59c6f77ebc38\") " pod="openstack/crc-debug-96l4x" Jun 06 11:06:02 crc kubenswrapper[4911]: I0606 11:06:02.002050 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c51d505c-d11c-4820-9e7e-59c6f77ebc38-host\") pod \"crc-debug-96l4x\" (UID: \"c51d505c-d11c-4820-9e7e-59c6f77ebc38\") " pod="openstack/crc-debug-96l4x" Jun 06 11:06:02 crc kubenswrapper[4911]: I0606 11:06:02.002222 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c51d505c-d11c-4820-9e7e-59c6f77ebc38-host\") pod \"crc-debug-96l4x\" (UID: \"c51d505c-d11c-4820-9e7e-59c6f77ebc38\") " pod="openstack/crc-debug-96l4x" Jun 06 11:06:02 crc kubenswrapper[4911]: I0606 11:06:02.037193 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pn9p\" (UniqueName: \"kubernetes.io/projected/c51d505c-d11c-4820-9e7e-59c6f77ebc38-kube-api-access-7pn9p\") pod \"crc-debug-96l4x\" (UID: \"c51d505c-d11c-4820-9e7e-59c6f77ebc38\") " pod="openstack/crc-debug-96l4x" Jun 06 
11:06:02 crc kubenswrapper[4911]: I0606 11:06:02.062703 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-96l4x" Jun 06 11:06:02 crc kubenswrapper[4911]: I0606 11:06:02.621696 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-96l4x" event={"ID":"c51d505c-d11c-4820-9e7e-59c6f77ebc38","Type":"ContainerStarted","Data":"9c471992eaa00e4bab536d2705efc369cc9ed7073f279efbbc6d640f088fd05c"} Jun 06 11:06:03 crc kubenswrapper[4911]: I0606 11:06:03.636175 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-96l4x" event={"ID":"c51d505c-d11c-4820-9e7e-59c6f77ebc38","Type":"ContainerStarted","Data":"d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6"} Jun 06 11:06:03 crc kubenswrapper[4911]: I0606 11:06:03.655015 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-96l4x" podStartSLOduration=2.654990828 podStartE2EDuration="2.654990828s" podCreationTimestamp="2025-06-06 11:06:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:06:03.651833686 +0000 UTC m=+6774.927259229" watchObservedRunningTime="2025-06-06 11:06:03.654990828 +0000 UTC m=+6774.930416371" Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.046151 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-96l4x"] Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.047274 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-96l4x" podUID="c51d505c-d11c-4820-9e7e-59c6f77ebc38" containerName="container-00" containerID="cri-o://d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6" gracePeriod=2 Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.062453 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-96l4x"] Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.200503 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-96l4x" Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.301220 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c51d505c-d11c-4820-9e7e-59c6f77ebc38-host\") pod \"c51d505c-d11c-4820-9e7e-59c6f77ebc38\" (UID: \"c51d505c-d11c-4820-9e7e-59c6f77ebc38\") " Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.301345 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pn9p\" (UniqueName: \"kubernetes.io/projected/c51d505c-d11c-4820-9e7e-59c6f77ebc38-kube-api-access-7pn9p\") pod \"c51d505c-d11c-4820-9e7e-59c6f77ebc38\" (UID: \"c51d505c-d11c-4820-9e7e-59c6f77ebc38\") " Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.301369 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c51d505c-d11c-4820-9e7e-59c6f77ebc38-host" (OuterVolumeSpecName: "host") pod "c51d505c-d11c-4820-9e7e-59c6f77ebc38" (UID: "c51d505c-d11c-4820-9e7e-59c6f77ebc38"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.302116 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c51d505c-d11c-4820-9e7e-59c6f77ebc38-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.310141 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c51d505c-d11c-4820-9e7e-59c6f77ebc38-kube-api-access-7pn9p" (OuterVolumeSpecName: "kube-api-access-7pn9p") pod "c51d505c-d11c-4820-9e7e-59c6f77ebc38" (UID: "c51d505c-d11c-4820-9e7e-59c6f77ebc38"). InnerVolumeSpecName "kube-api-access-7pn9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.407240 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pn9p\" (UniqueName: \"kubernetes.io/projected/c51d505c-d11c-4820-9e7e-59c6f77ebc38-kube-api-access-7pn9p\") on node \"crc\" DevicePath \"\"" Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.762187 4911 generic.go:334] "Generic (PLEG): container finished" podID="c51d505c-d11c-4820-9e7e-59c6f77ebc38" containerID="d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6" exitCode=0 Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.763394 4911 scope.go:117] "RemoveContainer" containerID="d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6" Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.763674 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-96l4x" Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.797796 4911 scope.go:117] "RemoveContainer" containerID="d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6" Jun 06 11:06:13 crc kubenswrapper[4911]: E0606 11:06:13.798264 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6\": container with ID starting with d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6 not found: ID does not exist" containerID="d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6" Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.798309 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6"} err="failed to get container status \"d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6\": rpc error: code = NotFound desc = could not find container \"d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6\": container with ID starting with d9d00eb2899d0821acdd6bdd98b33c190e6654b67eeab8e902add4998a1a1ed6 not found: ID does not exist" Jun 06 11:06:13 crc kubenswrapper[4911]: I0606 11:06:13.970219 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c51d505c-d11c-4820-9e7e-59c6f77ebc38" path="/var/lib/kubelet/pods/c51d505c-d11c-4820-9e7e-59c6f77ebc38/volumes" Jun 06 11:06:16 crc kubenswrapper[4911]: I0606 11:06:16.488952 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7d9b68456c-zdv9k_d50d1712-3d8c-4212-9185-a4a2a186215c/nmstate-console-plugin/0.log" Jun 06 11:06:16 crc kubenswrapper[4911]: I0606 11:06:16.751050 4911 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-handler-47p8t_cbc7d963-711d-4639-a21e-f5876e0aec15/nmstate-handler/0.log" Jun 06 11:06:16 crc kubenswrapper[4911]: I0606 11:06:16.851195 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-748555f888-rl4mq_7f26fb38-be4a-43b1-b2ba-632ba751588c/kube-rbac-proxy/0.log" Jun 06 11:06:16 crc kubenswrapper[4911]: I0606 11:06:16.870066 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-748555f888-rl4mq_7f26fb38-be4a-43b1-b2ba-632ba751588c/nmstate-metrics/0.log" Jun 06 11:06:17 crc kubenswrapper[4911]: I0606 11:06:17.033506 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-6c5f9d4654-mw7jx_abdedb83-822a-444e-a8ad-e843d4194830/nmstate-operator/0.log" Jun 06 11:06:17 crc kubenswrapper[4911]: I0606 11:06:17.152075 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-79c49d6bf4-kc7jr_27855136-8ad1-414e-896b-97ff98a45f34/nmstate-webhook/0.log" Jun 06 11:06:34 crc kubenswrapper[4911]: I0606 11:06:34.088034 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5f968f88cc-s8ttm_6c811786-a64d-49bb-93d8-88f5846c7462/kube-rbac-proxy/0.log" Jun 06 11:06:34 crc kubenswrapper[4911]: I0606 11:06:34.246777 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5f968f88cc-s8ttm_6c811786-a64d-49bb-93d8-88f5846c7462/controller/0.log" Jun 06 11:06:34 crc kubenswrapper[4911]: I0606 11:06:34.370014 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-frr-files/0.log" Jun 06 11:06:34 crc kubenswrapper[4911]: I0606 11:06:34.576518 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-reloader/0.log" Jun 06 11:06:34 crc kubenswrapper[4911]: I0606 11:06:34.591618 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-metrics/0.log" Jun 06 11:06:34 crc kubenswrapper[4911]: I0606 11:06:34.593976 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-frr-files/0.log" Jun 06 11:06:34 crc kubenswrapper[4911]: I0606 11:06:34.612461 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-reloader/0.log" Jun 06 11:06:34 crc kubenswrapper[4911]: I0606 11:06:34.899542 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-reloader/0.log" Jun 06 11:06:34 crc kubenswrapper[4911]: I0606 11:06:34.929682 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-frr-files/0.log" Jun 06 11:06:34 crc kubenswrapper[4911]: I0606 11:06:34.929753 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-metrics/0.log" Jun 06 11:06:34 crc kubenswrapper[4911]: I0606 11:06:34.930616 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-metrics/0.log" Jun 06 11:06:35 crc kubenswrapper[4911]: I0606 11:06:35.122644 4911 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-reloader/0.log" Jun 06 11:06:35 crc kubenswrapper[4911]: I0606 11:06:35.124947 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-frr-files/0.log" Jun 06 11:06:35 crc kubenswrapper[4911]: I0606 11:06:35.175266 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/cp-metrics/0.log" Jun 06 11:06:35 crc kubenswrapper[4911]: I0606 11:06:35.208113 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/controller/0.log" Jun 06 11:06:35 crc kubenswrapper[4911]: I0606 11:06:35.399751 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/frr-metrics/0.log" Jun 06 11:06:35 crc kubenswrapper[4911]: I0606 11:06:35.400575 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/kube-rbac-proxy/0.log" Jun 06 11:06:35 crc kubenswrapper[4911]: I0606 11:06:35.453400 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/kube-rbac-proxy-frr/0.log" Jun 06 11:06:35 crc kubenswrapper[4911]: I0606 11:06:35.637354 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/reloader/0.log" Jun 06 11:06:35 crc kubenswrapper[4911]: I0606 11:06:35.740883 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-8457d999f9-ghvnk_aded57b8-2cbf-41c5-ada2-e7768d87ab83/frr-k8s-webhook-server/0.log" Jun 06 11:06:36 crc kubenswrapper[4911]: I0606 11:06:36.001737 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-58775bb567-66nbp_4967770d-b296-42f3-855d-46b34c89a0e4/manager/0.log" Jun 06 11:06:36 crc kubenswrapper[4911]: I0606 11:06:36.200872 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-547795769d-5msxn_9b6cf40e-e3a1-409d-a796-8a60840c0a07/webhook-server/0.log" Jun 06 11:06:36 crc kubenswrapper[4911]: I0606 11:06:36.321000 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bvtzh_5e555a27-714d-4b49-8e05-1cab47ab1a16/kube-rbac-proxy/0.log" Jun 06 11:06:37 crc kubenswrapper[4911]: I0606 11:06:37.305726 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bvtzh_5e555a27-714d-4b49-8e05-1cab47ab1a16/speaker/0.log" Jun 06 11:06:38 crc kubenswrapper[4911]: I0606 11:06:38.103233 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mxfmj_1e336f61-fb9b-45f7-ba82-f09f0d38d592/frr/0.log" Jun 06 11:06:51 crc kubenswrapper[4911]: I0606 11:06:51.963456 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k_7c2e9c04-daa4-4e63-8e6f-952d8118802b/util/0.log" Jun 06 11:06:52 crc kubenswrapper[4911]: I0606 11:06:52.169214 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k_7c2e9c04-daa4-4e63-8e6f-952d8118802b/util/0.log" Jun 06 11:06:52 crc kubenswrapper[4911]: I0606 
11:06:52.230904 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k_7c2e9c04-daa4-4e63-8e6f-952d8118802b/pull/0.log" Jun 06 11:06:52 crc kubenswrapper[4911]: I0606 11:06:52.241281 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k_7c2e9c04-daa4-4e63-8e6f-952d8118802b/pull/0.log" Jun 06 11:06:52 crc kubenswrapper[4911]: I0606 11:06:52.446320 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k_7c2e9c04-daa4-4e63-8e6f-952d8118802b/util/0.log" Jun 06 11:06:52 crc kubenswrapper[4911]: I0606 11:06:52.476864 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k_7c2e9c04-daa4-4e63-8e6f-952d8118802b/pull/0.log" Jun 06 11:06:52 crc kubenswrapper[4911]: I0606 11:06:52.502799 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6589df99d30ac9cb6e2ff26885e3c29d10fbe97338967aa6e4a5a06c85kkm6k_7c2e9c04-daa4-4e63-8e6f-952d8118802b/extract/0.log" Jun 06 11:06:52 crc kubenswrapper[4911]: I0606 11:06:52.659305 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b_fd9d9e39-0195-4749-a5f4-c8c802a9f3f6/util/0.log" Jun 06 11:06:52 crc kubenswrapper[4911]: I0606 11:06:52.887721 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b_fd9d9e39-0195-4749-a5f4-c8c802a9f3f6/pull/0.log" Jun 06 11:06:52 crc kubenswrapper[4911]: I0606 11:06:52.892673 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b_fd9d9e39-0195-4749-a5f4-c8c802a9f3f6/pull/0.log" Jun 06 11:06:52 crc kubenswrapper[4911]: I0606 11:06:52.894809 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b_fd9d9e39-0195-4749-a5f4-c8c802a9f3f6/util/0.log" Jun 06 11:06:53 crc kubenswrapper[4911]: I0606 11:06:53.124076 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b_fd9d9e39-0195-4749-a5f4-c8c802a9f3f6/util/0.log" Jun 06 11:06:53 crc kubenswrapper[4911]: I0606 11:06:53.169592 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b_fd9d9e39-0195-4749-a5f4-c8c802a9f3f6/extract/0.log" Jun 06 11:06:53 crc kubenswrapper[4911]: I0606 11:06:53.275997 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8271ef8324a8ce7222c2cdbf8132ce018159fa7b9b3b470e4d6b480057k9k4b_fd9d9e39-0195-4749-a5f4-c8c802a9f3f6/pull/0.log" Jun 06 11:06:53 crc kubenswrapper[4911]: I0606 11:06:53.369826 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9nsqk_88393b7e-da87-4e83-8189-4ae89c499b77/extract-utilities/0.log" Jun 06 11:06:53 crc kubenswrapper[4911]: I0606 11:06:53.612960 4911 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-9nsqk_88393b7e-da87-4e83-8189-4ae89c499b77/extract-content/0.log" Jun 06 11:06:53 crc kubenswrapper[4911]: I0606 11:06:53.615017 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9nsqk_88393b7e-da87-4e83-8189-4ae89c499b77/extract-content/0.log" Jun 06 11:06:53 crc kubenswrapper[4911]: I0606 11:06:53.647872 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9nsqk_88393b7e-da87-4e83-8189-4ae89c499b77/extract-utilities/0.log" Jun 06 11:06:53 crc kubenswrapper[4911]: I0606 11:06:53.902977 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9nsqk_88393b7e-da87-4e83-8189-4ae89c499b77/extract-utilities/0.log" Jun 06 11:06:53 crc kubenswrapper[4911]: I0606 11:06:53.923522 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9nsqk_88393b7e-da87-4e83-8189-4ae89c499b77/extract-content/0.log" Jun 06 11:06:54 crc kubenswrapper[4911]: I0606 11:06:54.197804 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grr5v_8040a62a-fa39-41c8-a7fc-b28059b6e367/extract-utilities/0.log" Jun 06 11:06:54 crc kubenswrapper[4911]: I0606 11:06:54.300246 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 11:06:54 crc kubenswrapper[4911]: I0606 11:06:54.300327 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 11:06:54 crc kubenswrapper[4911]: I0606 11:06:54.432709 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grr5v_8040a62a-fa39-41c8-a7fc-b28059b6e367/extract-content/0.log" Jun 06 11:06:54 crc kubenswrapper[4911]: I0606 11:06:54.456705 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grr5v_8040a62a-fa39-41c8-a7fc-b28059b6e367/extract-utilities/0.log" Jun 06 11:06:54 crc kubenswrapper[4911]: I0606 11:06:54.493074 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9nsqk_88393b7e-da87-4e83-8189-4ae89c499b77/registry-server/0.log" Jun 06 11:06:54 crc kubenswrapper[4911]: I0606 11:06:54.578348 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grr5v_8040a62a-fa39-41c8-a7fc-b28059b6e367/extract-content/0.log" Jun 06 11:06:54 crc kubenswrapper[4911]: I0606 11:06:54.710406 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grr5v_8040a62a-fa39-41c8-a7fc-b28059b6e367/extract-content/0.log" Jun 06 11:06:54 crc kubenswrapper[4911]: I0606 11:06:54.714396 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grr5v_8040a62a-fa39-41c8-a7fc-b28059b6e367/extract-utilities/0.log" Jun 06 11:06:54 crc kubenswrapper[4911]: I0606 11:06:54.949683 4911 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-r6jnl_18686ff3-8800-4c67-b287-5989dd4dd44e/marketplace-operator/0.log" Jun 06 11:06:55 crc kubenswrapper[4911]: I0606 11:06:55.034389 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-p29wq_b15e184d-dfb3-492c-a3aa-32e514493e6f/extract-utilities/0.log" Jun 06 11:06:55 crc kubenswrapper[4911]: I0606 11:06:55.238342 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-p29wq_b15e184d-dfb3-492c-a3aa-32e514493e6f/extract-utilities/0.log" Jun 06 11:06:55 crc kubenswrapper[4911]: I0606 11:06:55.244291 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-p29wq_b15e184d-dfb3-492c-a3aa-32e514493e6f/extract-content/0.log" Jun 06 11:06:55 crc kubenswrapper[4911]: I0606 11:06:55.260868 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-grr5v_8040a62a-fa39-41c8-a7fc-b28059b6e367/registry-server/0.log" Jun 06 11:06:55 crc kubenswrapper[4911]: I0606 11:06:55.296157 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-p29wq_b15e184d-dfb3-492c-a3aa-32e514493e6f/extract-content/0.log" Jun 06 11:06:55 crc kubenswrapper[4911]: I0606 11:06:55.542287 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rj7zg_7006e423-c587-4159-aba3-aa155251dee6/extract-utilities/0.log" Jun 06 11:06:55 crc kubenswrapper[4911]: I0606 11:06:55.616992 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-p29wq_b15e184d-dfb3-492c-a3aa-32e514493e6f/extract-utilities/0.log" Jun 06 11:06:55 crc kubenswrapper[4911]: I0606 11:06:55.637494 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-p29wq_b15e184d-dfb3-492c-a3aa-32e514493e6f/extract-content/0.log" Jun 06 11:06:55 crc kubenswrapper[4911]: I0606 11:06:55.838191 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rj7zg_7006e423-c587-4159-aba3-aa155251dee6/extract-utilities/0.log" Jun 06 11:06:55 crc kubenswrapper[4911]: I0606 11:06:55.871743 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rj7zg_7006e423-c587-4159-aba3-aa155251dee6/extract-content/0.log" Jun 06 11:06:55 crc kubenswrapper[4911]: I0606 11:06:55.957875 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rj7zg_7006e423-c587-4159-aba3-aa155251dee6/extract-content/0.log" Jun 06 11:06:56 crc kubenswrapper[4911]: I0606 11:06:56.019749 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-p29wq_b15e184d-dfb3-492c-a3aa-32e514493e6f/registry-server/0.log" Jun 06 11:06:56 crc kubenswrapper[4911]: I0606 11:06:56.155945 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rj7zg_7006e423-c587-4159-aba3-aa155251dee6/extract-utilities/0.log" Jun 06 11:06:56 crc kubenswrapper[4911]: I0606 11:06:56.189122 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rj7zg_7006e423-c587-4159-aba3-aa155251dee6/extract-content/0.log" Jun 06 11:06:57 crc kubenswrapper[4911]: I0606 11:06:57.725939 4911 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-operators-rj7zg_7006e423-c587-4159-aba3-aa155251dee6/registry-server/0.log" Jun 06 11:07:01 crc kubenswrapper[4911]: I0606 11:07:01.475374 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-9xxwt"] Jun 06 11:07:01 crc kubenswrapper[4911]: E0606 11:07:01.476917 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c51d505c-d11c-4820-9e7e-59c6f77ebc38" containerName="container-00" Jun 06 11:07:01 crc kubenswrapper[4911]: I0606 11:07:01.476936 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="c51d505c-d11c-4820-9e7e-59c6f77ebc38" containerName="container-00" Jun 06 11:07:01 crc kubenswrapper[4911]: I0606 11:07:01.477211 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="c51d505c-d11c-4820-9e7e-59c6f77ebc38" containerName="container-00" Jun 06 11:07:01 crc kubenswrapper[4911]: I0606 11:07:01.478239 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-9xxwt" Jun 06 11:07:01 crc kubenswrapper[4911]: I0606 11:07:01.548211 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk7dp\" (UniqueName: \"kubernetes.io/projected/bc55fea0-f9c4-4031-9cf2-ca4e45571624-kube-api-access-sk7dp\") pod \"crc-debug-9xxwt\" (UID: \"bc55fea0-f9c4-4031-9cf2-ca4e45571624\") " pod="openstack/crc-debug-9xxwt" Jun 06 11:07:01 crc kubenswrapper[4911]: I0606 11:07:01.548726 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bc55fea0-f9c4-4031-9cf2-ca4e45571624-host\") pod \"crc-debug-9xxwt\" (UID: \"bc55fea0-f9c4-4031-9cf2-ca4e45571624\") " pod="openstack/crc-debug-9xxwt" Jun 06 11:07:01 crc kubenswrapper[4911]: I0606 11:07:01.652076 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk7dp\" (UniqueName: \"kubernetes.io/projected/bc55fea0-f9c4-4031-9cf2-ca4e45571624-kube-api-access-sk7dp\") pod \"crc-debug-9xxwt\" (UID: \"bc55fea0-f9c4-4031-9cf2-ca4e45571624\") " pod="openstack/crc-debug-9xxwt" Jun 06 11:07:01 crc kubenswrapper[4911]: I0606 11:07:01.652168 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bc55fea0-f9c4-4031-9cf2-ca4e45571624-host\") pod \"crc-debug-9xxwt\" (UID: \"bc55fea0-f9c4-4031-9cf2-ca4e45571624\") " pod="openstack/crc-debug-9xxwt" Jun 06 11:07:01 crc kubenswrapper[4911]: I0606 11:07:01.652372 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bc55fea0-f9c4-4031-9cf2-ca4e45571624-host\") pod \"crc-debug-9xxwt\" (UID: \"bc55fea0-f9c4-4031-9cf2-ca4e45571624\") " pod="openstack/crc-debug-9xxwt" Jun 06 11:07:01 crc kubenswrapper[4911]: I0606 11:07:01.692777 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk7dp\" (UniqueName: \"kubernetes.io/projected/bc55fea0-f9c4-4031-9cf2-ca4e45571624-kube-api-access-sk7dp\") pod \"crc-debug-9xxwt\" (UID: \"bc55fea0-f9c4-4031-9cf2-ca4e45571624\") " pod="openstack/crc-debug-9xxwt" Jun 06 11:07:01 crc kubenswrapper[4911]: I0606 11:07:01.800265 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-9xxwt" Jun 06 11:07:02 crc kubenswrapper[4911]: I0606 11:07:02.354517 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-9xxwt" event={"ID":"bc55fea0-f9c4-4031-9cf2-ca4e45571624","Type":"ContainerStarted","Data":"47825316a230489e448715ad7fb4e929635fec6337f138b75d42f8673eeff1e5"} Jun 06 11:07:02 crc kubenswrapper[4911]: I0606 11:07:02.354924 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-9xxwt" event={"ID":"bc55fea0-f9c4-4031-9cf2-ca4e45571624","Type":"ContainerStarted","Data":"dda0de9ad8e4ae4196f7b3c23c9b7aa242937e1b25a93dc9d9b78669f4c52861"} Jun 06 11:07:02 crc kubenswrapper[4911]: I0606 11:07:02.387549 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-9xxwt" podStartSLOduration=1.387525706 podStartE2EDuration="1.387525706s" podCreationTimestamp="2025-06-06 11:07:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:07:02.373323909 +0000 UTC m=+6833.648749472" watchObservedRunningTime="2025-06-06 11:07:02.387525706 +0000 UTC m=+6833.662951249" Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.382504 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-9xxwt"] Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.383738 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-9xxwt" podUID="bc55fea0-f9c4-4031-9cf2-ca4e45571624" containerName="container-00" containerID="cri-o://47825316a230489e448715ad7fb4e929635fec6337f138b75d42f8673eeff1e5" gracePeriod=2 Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.395781 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-9xxwt"] Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.505425 4911 generic.go:334] "Generic (PLEG): container finished" podID="bc55fea0-f9c4-4031-9cf2-ca4e45571624" containerID="47825316a230489e448715ad7fb4e929635fec6337f138b75d42f8673eeff1e5" exitCode=0 Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.505513 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dda0de9ad8e4ae4196f7b3c23c9b7aa242937e1b25a93dc9d9b78669f4c52861" Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.506486 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-9xxwt" Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.606018 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sk7dp\" (UniqueName: \"kubernetes.io/projected/bc55fea0-f9c4-4031-9cf2-ca4e45571624-kube-api-access-sk7dp\") pod \"bc55fea0-f9c4-4031-9cf2-ca4e45571624\" (UID: \"bc55fea0-f9c4-4031-9cf2-ca4e45571624\") " Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.606379 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bc55fea0-f9c4-4031-9cf2-ca4e45571624-host\") pod \"bc55fea0-f9c4-4031-9cf2-ca4e45571624\" (UID: \"bc55fea0-f9c4-4031-9cf2-ca4e45571624\") " Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.606852 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bc55fea0-f9c4-4031-9cf2-ca4e45571624-host" (OuterVolumeSpecName: "host") pod "bc55fea0-f9c4-4031-9cf2-ca4e45571624" (UID: "bc55fea0-f9c4-4031-9cf2-ca4e45571624"). 
InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.608085 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bc55fea0-f9c4-4031-9cf2-ca4e45571624-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.614076 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc55fea0-f9c4-4031-9cf2-ca4e45571624-kube-api-access-sk7dp" (OuterVolumeSpecName: "kube-api-access-sk7dp") pod "bc55fea0-f9c4-4031-9cf2-ca4e45571624" (UID: "bc55fea0-f9c4-4031-9cf2-ca4e45571624"). InnerVolumeSpecName "kube-api-access-sk7dp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.711402 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sk7dp\" (UniqueName: \"kubernetes.io/projected/bc55fea0-f9c4-4031-9cf2-ca4e45571624-kube-api-access-sk7dp\") on node \"crc\" DevicePath \"\"" Jun 06 11:07:15 crc kubenswrapper[4911]: I0606 11:07:15.964898 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc55fea0-f9c4-4031-9cf2-ca4e45571624" path="/var/lib/kubelet/pods/bc55fea0-f9c4-4031-9cf2-ca4e45571624/volumes" Jun 06 11:07:16 crc kubenswrapper[4911]: I0606 11:07:16.515875 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-9xxwt" Jun 06 11:07:24 crc kubenswrapper[4911]: I0606 11:07:24.300361 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 11:07:24 crc kubenswrapper[4911]: I0606 11:07:24.301010 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 11:07:43 crc kubenswrapper[4911]: E0606 11:07:43.813284 4911 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.108:48490->38.129.56.108:33257: write tcp 38.129.56.108:48490->38.129.56.108:33257: write: broken pipe Jun 06 11:07:54 crc kubenswrapper[4911]: I0606 11:07:54.300649 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 11:07:54 crc kubenswrapper[4911]: I0606 11:07:54.301580 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 11:07:54 crc kubenswrapper[4911]: I0606 11:07:54.301646 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 11:07:54 crc kubenswrapper[4911]: I0606 11:07:54.304179 4911 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4a9223c433fb722a9b85a4df3f4ee18287263b354b671709a533ea9543f9f310"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 11:07:54 crc kubenswrapper[4911]: I0606 11:07:54.304265 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://4a9223c433fb722a9b85a4df3f4ee18287263b354b671709a533ea9543f9f310" gracePeriod=600 Jun 06 11:07:54 crc kubenswrapper[4911]: I0606 11:07:54.982001 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="4a9223c433fb722a9b85a4df3f4ee18287263b354b671709a533ea9543f9f310" exitCode=0 Jun 06 11:07:54 crc kubenswrapper[4911]: I0606 11:07:54.982114 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"4a9223c433fb722a9b85a4df3f4ee18287263b354b671709a533ea9543f9f310"} Jun 06 11:07:54 crc kubenswrapper[4911]: I0606 11:07:54.982821 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerStarted","Data":"62f5262a5345fd9b2178cb4ee97d17f705e684fe5a48f400622ba92f82d2a3ce"} Jun 06 11:07:54 crc kubenswrapper[4911]: I0606 11:07:54.982856 4911 scope.go:117] "RemoveContainer" containerID="33ae528c37d9488bcb8987e1235b873b6439d8d9c57d5b7825436dbe30359db6" Jun 06 11:08:01 crc kubenswrapper[4911]: I0606 11:08:01.740386 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-6smkt"] Jun 06 11:08:01 crc kubenswrapper[4911]: E0606 11:08:01.741602 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc55fea0-f9c4-4031-9cf2-ca4e45571624" containerName="container-00" Jun 06 11:08:01 crc kubenswrapper[4911]: I0606 11:08:01.741616 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc55fea0-f9c4-4031-9cf2-ca4e45571624" containerName="container-00" Jun 06 11:08:01 crc kubenswrapper[4911]: I0606 11:08:01.741820 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc55fea0-f9c4-4031-9cf2-ca4e45571624" containerName="container-00" Jun 06 11:08:01 crc kubenswrapper[4911]: I0606 11:08:01.742459 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-6smkt" Jun 06 11:08:01 crc kubenswrapper[4911]: I0606 11:08:01.767832 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d35e73-4f94-4ba0-b81b-15c9be9f6628-host\") pod \"crc-debug-6smkt\" (UID: \"41d35e73-4f94-4ba0-b81b-15c9be9f6628\") " pod="openstack/crc-debug-6smkt" Jun 06 11:08:01 crc kubenswrapper[4911]: I0606 11:08:01.768030 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp5mz\" (UniqueName: \"kubernetes.io/projected/41d35e73-4f94-4ba0-b81b-15c9be9f6628-kube-api-access-sp5mz\") pod \"crc-debug-6smkt\" (UID: \"41d35e73-4f94-4ba0-b81b-15c9be9f6628\") " pod="openstack/crc-debug-6smkt" Jun 06 11:08:01 crc kubenswrapper[4911]: I0606 11:08:01.871047 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp5mz\" (UniqueName: \"kubernetes.io/projected/41d35e73-4f94-4ba0-b81b-15c9be9f6628-kube-api-access-sp5mz\") pod \"crc-debug-6smkt\" (UID: \"41d35e73-4f94-4ba0-b81b-15c9be9f6628\") " pod="openstack/crc-debug-6smkt" Jun 06 11:08:01 crc kubenswrapper[4911]: I0606 11:08:01.871461 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d35e73-4f94-4ba0-b81b-15c9be9f6628-host\") pod \"crc-debug-6smkt\" (UID: \"41d35e73-4f94-4ba0-b81b-15c9be9f6628\") " pod="openstack/crc-debug-6smkt" Jun 06 11:08:01 crc kubenswrapper[4911]: I0606 11:08:01.871665 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d35e73-4f94-4ba0-b81b-15c9be9f6628-host\") pod \"crc-debug-6smkt\" (UID: \"41d35e73-4f94-4ba0-b81b-15c9be9f6628\") " pod="openstack/crc-debug-6smkt" Jun 06 11:08:01 crc kubenswrapper[4911]: I0606 11:08:01.895087 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp5mz\" (UniqueName: \"kubernetes.io/projected/41d35e73-4f94-4ba0-b81b-15c9be9f6628-kube-api-access-sp5mz\") pod \"crc-debug-6smkt\" (UID: \"41d35e73-4f94-4ba0-b81b-15c9be9f6628\") " pod="openstack/crc-debug-6smkt" Jun 06 11:08:02 crc kubenswrapper[4911]: I0606 11:08:02.085122 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-6smkt" Jun 06 11:08:03 crc kubenswrapper[4911]: I0606 11:08:03.089891 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-6smkt" event={"ID":"41d35e73-4f94-4ba0-b81b-15c9be9f6628","Type":"ContainerStarted","Data":"abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6"} Jun 06 11:08:03 crc kubenswrapper[4911]: I0606 11:08:03.090507 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-6smkt" event={"ID":"41d35e73-4f94-4ba0-b81b-15c9be9f6628","Type":"ContainerStarted","Data":"2756cfeced96c7fb1e399c7117856dad55f4a8adf5b8ccd2783394bf470d4507"} Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.051569 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-6smkt" podStartSLOduration=12.051482833 podStartE2EDuration="12.051482833s" podCreationTimestamp="2025-06-06 11:08:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:08:03.114135144 +0000 UTC m=+6894.389560697" watchObservedRunningTime="2025-06-06 11:08:13.051482833 +0000 UTC m=+6904.326908396" Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.059223 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-6smkt"] Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.059614 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-6smkt" podUID="41d35e73-4f94-4ba0-b81b-15c9be9f6628" containerName="container-00" containerID="cri-o://abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6" gracePeriod=2 Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.073605 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-6smkt"] Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.196134 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-6smkt" Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.223383 4911 generic.go:334] "Generic (PLEG): container finished" podID="41d35e73-4f94-4ba0-b81b-15c9be9f6628" containerID="abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6" exitCode=0 Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.223522 4911 scope.go:117] "RemoveContainer" containerID="abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6" Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.223979 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-6smkt" Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.254909 4911 scope.go:117] "RemoveContainer" containerID="abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6" Jun 06 11:08:13 crc kubenswrapper[4911]: E0606 11:08:13.255816 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6\": container with ID starting with abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6 not found: ID does not exist" containerID="abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6" Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.255892 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6"} err="failed to get container status \"abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6\": rpc error: code = NotFound desc = could not find container \"abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6\": container with ID starting with abe15f630e2a7e5dae363f6d661d91958da2efa54132bde68769b63d3577f6b6 not found: ID does not exist" Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.313661 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sp5mz\" (UniqueName: \"kubernetes.io/projected/41d35e73-4f94-4ba0-b81b-15c9be9f6628-kube-api-access-sp5mz\") pod \"41d35e73-4f94-4ba0-b81b-15c9be9f6628\" (UID: \"41d35e73-4f94-4ba0-b81b-15c9be9f6628\") " Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.313890 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d35e73-4f94-4ba0-b81b-15c9be9f6628-host\") pod \"41d35e73-4f94-4ba0-b81b-15c9be9f6628\" (UID: \"41d35e73-4f94-4ba0-b81b-15c9be9f6628\") " Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.314314 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41d35e73-4f94-4ba0-b81b-15c9be9f6628-host" (OuterVolumeSpecName: "host") pod "41d35e73-4f94-4ba0-b81b-15c9be9f6628" (UID: "41d35e73-4f94-4ba0-b81b-15c9be9f6628"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.314924 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d35e73-4f94-4ba0-b81b-15c9be9f6628-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.337558 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41d35e73-4f94-4ba0-b81b-15c9be9f6628-kube-api-access-sp5mz" (OuterVolumeSpecName: "kube-api-access-sp5mz") pod "41d35e73-4f94-4ba0-b81b-15c9be9f6628" (UID: "41d35e73-4f94-4ba0-b81b-15c9be9f6628"). InnerVolumeSpecName "kube-api-access-sp5mz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.418376 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sp5mz\" (UniqueName: \"kubernetes.io/projected/41d35e73-4f94-4ba0-b81b-15c9be9f6628-kube-api-access-sp5mz\") on node \"crc\" DevicePath \"\"" Jun 06 11:08:13 crc kubenswrapper[4911]: I0606 11:08:13.963942 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41d35e73-4f94-4ba0-b81b-15c9be9f6628" path="/var/lib/kubelet/pods/41d35e73-4f94-4ba0-b81b-15c9be9f6628/volumes" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.570059 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bpmr7"] Jun 06 11:08:45 crc kubenswrapper[4911]: E0606 11:08:45.571436 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41d35e73-4f94-4ba0-b81b-15c9be9f6628" containerName="container-00" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.571450 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="41d35e73-4f94-4ba0-b81b-15c9be9f6628" containerName="container-00" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.571659 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="41d35e73-4f94-4ba0-b81b-15c9be9f6628" containerName="container-00" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.573016 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.587734 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bpmr7"] Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.603430 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/288d596e-a3d8-4701-bfe5-115502532581-utilities\") pod \"community-operators-bpmr7\" (UID: \"288d596e-a3d8-4701-bfe5-115502532581\") " pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.603621 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs987\" (UniqueName: \"kubernetes.io/projected/288d596e-a3d8-4701-bfe5-115502532581-kube-api-access-fs987\") pod \"community-operators-bpmr7\" (UID: \"288d596e-a3d8-4701-bfe5-115502532581\") " pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.603751 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/288d596e-a3d8-4701-bfe5-115502532581-catalog-content\") pod \"community-operators-bpmr7\" (UID: \"288d596e-a3d8-4701-bfe5-115502532581\") " pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.706280 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/288d596e-a3d8-4701-bfe5-115502532581-utilities\") pod \"community-operators-bpmr7\" (UID: \"288d596e-a3d8-4701-bfe5-115502532581\") " pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.706779 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs987\" (UniqueName: 
\"kubernetes.io/projected/288d596e-a3d8-4701-bfe5-115502532581-kube-api-access-fs987\") pod \"community-operators-bpmr7\" (UID: \"288d596e-a3d8-4701-bfe5-115502532581\") " pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.706864 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/288d596e-a3d8-4701-bfe5-115502532581-catalog-content\") pod \"community-operators-bpmr7\" (UID: \"288d596e-a3d8-4701-bfe5-115502532581\") " pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.707574 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/288d596e-a3d8-4701-bfe5-115502532581-catalog-content\") pod \"community-operators-bpmr7\" (UID: \"288d596e-a3d8-4701-bfe5-115502532581\") " pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.707814 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/288d596e-a3d8-4701-bfe5-115502532581-utilities\") pod \"community-operators-bpmr7\" (UID: \"288d596e-a3d8-4701-bfe5-115502532581\") " pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.732317 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs987\" (UniqueName: \"kubernetes.io/projected/288d596e-a3d8-4701-bfe5-115502532581-kube-api-access-fs987\") pod \"community-operators-bpmr7\" (UID: \"288d596e-a3d8-4701-bfe5-115502532581\") " pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:45 crc kubenswrapper[4911]: I0606 11:08:45.902719 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:46 crc kubenswrapper[4911]: I0606 11:08:46.709389 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bpmr7"] Jun 06 11:08:47 crc kubenswrapper[4911]: E0606 11:08:47.190430 4911 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod288d596e_a3d8_4701_bfe5_115502532581.slice/crio-conmon-3a7dce3ceb2e34f091d9da4ecd9a3a1e20004b221899c369718c13d4ef52c1e4.scope\": RecentStats: unable to find data in memory cache]" Jun 06 11:08:47 crc kubenswrapper[4911]: I0606 11:08:47.625251 4911 generic.go:334] "Generic (PLEG): container finished" podID="288d596e-a3d8-4701-bfe5-115502532581" containerID="3a7dce3ceb2e34f091d9da4ecd9a3a1e20004b221899c369718c13d4ef52c1e4" exitCode=0 Jun 06 11:08:47 crc kubenswrapper[4911]: I0606 11:08:47.625317 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bpmr7" event={"ID":"288d596e-a3d8-4701-bfe5-115502532581","Type":"ContainerDied","Data":"3a7dce3ceb2e34f091d9da4ecd9a3a1e20004b221899c369718c13d4ef52c1e4"} Jun 06 11:08:47 crc kubenswrapper[4911]: I0606 11:08:47.625357 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bpmr7" event={"ID":"288d596e-a3d8-4701-bfe5-115502532581","Type":"ContainerStarted","Data":"c6e03872d785810bc4ef65783f32a13668a2a8f36befdcd0605a0d8ca3e5f447"} Jun 06 11:08:52 crc kubenswrapper[4911]: I0606 11:08:52.688405 4911 generic.go:334] "Generic (PLEG): container finished" podID="288d596e-a3d8-4701-bfe5-115502532581" containerID="b0ce5900e12d91e26bb1b5112a361e35d4e7683bcd9098152272e64c40f06464" exitCode=0 Jun 06 11:08:52 crc kubenswrapper[4911]: I0606 11:08:52.688523 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bpmr7" event={"ID":"288d596e-a3d8-4701-bfe5-115502532581","Type":"ContainerDied","Data":"b0ce5900e12d91e26bb1b5112a361e35d4e7683bcd9098152272e64c40f06464"} Jun 06 11:08:53 crc kubenswrapper[4911]: I0606 11:08:53.704444 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bpmr7" event={"ID":"288d596e-a3d8-4701-bfe5-115502532581","Type":"ContainerStarted","Data":"687e86478a94005bfc7ba160c5934ef6d26832db7407ab5f5c4235b91c3e0048"} Jun 06 11:08:53 crc kubenswrapper[4911]: I0606 11:08:53.729540 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bpmr7" podStartSLOduration=3.069758802 podStartE2EDuration="8.729520366s" podCreationTimestamp="2025-06-06 11:08:45 +0000 UTC" firstStartedPulling="2025-06-06 11:08:47.627920843 +0000 UTC m=+6938.903346386" lastFinishedPulling="2025-06-06 11:08:53.287682407 +0000 UTC m=+6944.563107950" observedRunningTime="2025-06-06 11:08:53.724785973 +0000 UTC m=+6945.000211516" watchObservedRunningTime="2025-06-06 11:08:53.729520366 +0000 UTC m=+6945.004945909" Jun 06 11:08:55 crc kubenswrapper[4911]: I0606 11:08:55.903590 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:55 crc kubenswrapper[4911]: I0606 11:08:55.904475 4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:08:55 crc kubenswrapper[4911]: I0606 11:08:55.981838 
4911 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:09:01 crc kubenswrapper[4911]: I0606 11:09:01.459065 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-rwjhf"] Jun 06 11:09:01 crc kubenswrapper[4911]: I0606 11:09:01.461252 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-rwjhf" Jun 06 11:09:01 crc kubenswrapper[4911]: I0606 11:09:01.479220 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1fd7f886-189b-4d97-a1a2-aad015bf8994-host\") pod \"crc-debug-rwjhf\" (UID: \"1fd7f886-189b-4d97-a1a2-aad015bf8994\") " pod="openstack/crc-debug-rwjhf" Jun 06 11:09:01 crc kubenswrapper[4911]: I0606 11:09:01.479723 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b6cc\" (UniqueName: \"kubernetes.io/projected/1fd7f886-189b-4d97-a1a2-aad015bf8994-kube-api-access-7b6cc\") pod \"crc-debug-rwjhf\" (UID: \"1fd7f886-189b-4d97-a1a2-aad015bf8994\") " pod="openstack/crc-debug-rwjhf" Jun 06 11:09:01 crc kubenswrapper[4911]: I0606 11:09:01.582086 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1fd7f886-189b-4d97-a1a2-aad015bf8994-host\") pod \"crc-debug-rwjhf\" (UID: \"1fd7f886-189b-4d97-a1a2-aad015bf8994\") " pod="openstack/crc-debug-rwjhf" Jun 06 11:09:01 crc kubenswrapper[4911]: I0606 11:09:01.582617 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b6cc\" (UniqueName: \"kubernetes.io/projected/1fd7f886-189b-4d97-a1a2-aad015bf8994-kube-api-access-7b6cc\") pod \"crc-debug-rwjhf\" (UID: \"1fd7f886-189b-4d97-a1a2-aad015bf8994\") " pod="openstack/crc-debug-rwjhf" Jun 06 11:09:01 crc kubenswrapper[4911]: I0606 11:09:01.583173 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1fd7f886-189b-4d97-a1a2-aad015bf8994-host\") pod \"crc-debug-rwjhf\" (UID: \"1fd7f886-189b-4d97-a1a2-aad015bf8994\") " pod="openstack/crc-debug-rwjhf" Jun 06 11:09:01 crc kubenswrapper[4911]: I0606 11:09:01.611947 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b6cc\" (UniqueName: \"kubernetes.io/projected/1fd7f886-189b-4d97-a1a2-aad015bf8994-kube-api-access-7b6cc\") pod \"crc-debug-rwjhf\" (UID: \"1fd7f886-189b-4d97-a1a2-aad015bf8994\") " pod="openstack/crc-debug-rwjhf" Jun 06 11:09:01 crc kubenswrapper[4911]: I0606 11:09:01.789389 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-rwjhf" Jun 06 11:09:01 crc kubenswrapper[4911]: W0606 11:09:01.838137 4911 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fd7f886_189b_4d97_a1a2_aad015bf8994.slice/crio-60a5ae255caa4089dcc755c16770fa84d613533db32341c2c4e0ec505f6ca43b WatchSource:0}: Error finding container 60a5ae255caa4089dcc755c16770fa84d613533db32341c2c4e0ec505f6ca43b: Status 404 returned error can't find the container with id 60a5ae255caa4089dcc755c16770fa84d613533db32341c2c4e0ec505f6ca43b Jun 06 11:09:02 crc kubenswrapper[4911]: I0606 11:09:02.823112 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-rwjhf" event={"ID":"1fd7f886-189b-4d97-a1a2-aad015bf8994","Type":"ContainerStarted","Data":"e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1"} Jun 06 11:09:02 crc kubenswrapper[4911]: I0606 11:09:02.823619 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-rwjhf" event={"ID":"1fd7f886-189b-4d97-a1a2-aad015bf8994","Type":"ContainerStarted","Data":"60a5ae255caa4089dcc755c16770fa84d613533db32341c2c4e0ec505f6ca43b"} Jun 06 11:09:02 crc kubenswrapper[4911]: I0606 11:09:02.852220 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-rwjhf" podStartSLOduration=1.852191232 podStartE2EDuration="1.852191232s" podCreationTimestamp="2025-06-06 11:09:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:09:02.840837449 +0000 UTC m=+6954.116263002" watchObservedRunningTime="2025-06-06 11:09:02.852191232 +0000 UTC m=+6954.127616775" Jun 06 11:09:05 crc kubenswrapper[4911]: I0606 11:09:05.963476 4911 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bpmr7" Jun 06 11:09:06 crc kubenswrapper[4911]: I0606 11:09:06.054057 4911 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bpmr7"] Jun 06 11:09:06 crc kubenswrapper[4911]: I0606 11:09:06.106855 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-grr5v"] Jun 06 11:09:06 crc kubenswrapper[4911]: I0606 11:09:06.107591 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-grr5v" podUID="8040a62a-fa39-41c8-a7fc-b28059b6e367" containerName="registry-server" containerID="cri-o://cd5ce51de56833cd29c72fe92f3d08438b2271bf5fcf7b58ae696cfa9a6a1b47" gracePeriod=2 Jun 06 11:09:06 crc kubenswrapper[4911]: I0606 11:09:06.881170 4911 generic.go:334] "Generic (PLEG): container finished" podID="8040a62a-fa39-41c8-a7fc-b28059b6e367" containerID="cd5ce51de56833cd29c72fe92f3d08438b2271bf5fcf7b58ae696cfa9a6a1b47" exitCode=0 Jun 06 11:09:06 crc kubenswrapper[4911]: I0606 11:09:06.883113 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grr5v" event={"ID":"8040a62a-fa39-41c8-a7fc-b28059b6e367","Type":"ContainerDied","Data":"cd5ce51de56833cd29c72fe92f3d08438b2271bf5fcf7b58ae696cfa9a6a1b47"} Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.507266 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-grr5v" Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.637195 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-utilities\") pod \"8040a62a-fa39-41c8-a7fc-b28059b6e367\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.637286 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-catalog-content\") pod \"8040a62a-fa39-41c8-a7fc-b28059b6e367\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.637414 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxbfh\" (UniqueName: \"kubernetes.io/projected/8040a62a-fa39-41c8-a7fc-b28059b6e367-kube-api-access-kxbfh\") pod \"8040a62a-fa39-41c8-a7fc-b28059b6e367\" (UID: \"8040a62a-fa39-41c8-a7fc-b28059b6e367\") " Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.637867 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-utilities" (OuterVolumeSpecName: "utilities") pod "8040a62a-fa39-41c8-a7fc-b28059b6e367" (UID: "8040a62a-fa39-41c8-a7fc-b28059b6e367"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.638046 4911 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-utilities\") on node \"crc\" DevicePath \"\"" Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.647369 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8040a62a-fa39-41c8-a7fc-b28059b6e367-kube-api-access-kxbfh" (OuterVolumeSpecName: "kube-api-access-kxbfh") pod "8040a62a-fa39-41c8-a7fc-b28059b6e367" (UID: "8040a62a-fa39-41c8-a7fc-b28059b6e367"). InnerVolumeSpecName "kube-api-access-kxbfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.720164 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8040a62a-fa39-41c8-a7fc-b28059b6e367" (UID: "8040a62a-fa39-41c8-a7fc-b28059b6e367"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.742313 4911 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8040a62a-fa39-41c8-a7fc-b28059b6e367-catalog-content\") on node \"crc\" DevicePath \"\"" Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.742361 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxbfh\" (UniqueName: \"kubernetes.io/projected/8040a62a-fa39-41c8-a7fc-b28059b6e367-kube-api-access-kxbfh\") on node \"crc\" DevicePath \"\"" Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.898158 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grr5v" event={"ID":"8040a62a-fa39-41c8-a7fc-b28059b6e367","Type":"ContainerDied","Data":"2597b12f1256e79c1f5411e1c3f678f5d4cd4594d6ba5f89733d15758db4e948"} Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.898260 4911 scope.go:117] "RemoveContainer" containerID="cd5ce51de56833cd29c72fe92f3d08438b2271bf5fcf7b58ae696cfa9a6a1b47" Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.898272 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grr5v" Jun 06 11:09:07 crc kubenswrapper[4911]: I0606 11:09:07.958588 4911 scope.go:117] "RemoveContainer" containerID="7855574e7907283d59c95505c66a68ee19754fedfd47c58a8c37a2b69864a41f" Jun 06 11:09:08 crc kubenswrapper[4911]: I0606 11:09:08.015322 4911 scope.go:117] "RemoveContainer" containerID="b37f8ddea9985b16fb9d785f7307d79ea55fa97b714de6b068e407d7e885cbcf" Jun 06 11:09:08 crc kubenswrapper[4911]: I0606 11:09:08.035326 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-grr5v"] Jun 06 11:09:08 crc kubenswrapper[4911]: I0606 11:09:08.035823 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-grr5v"] Jun 06 11:09:09 crc kubenswrapper[4911]: I0606 11:09:09.967804 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8040a62a-fa39-41c8-a7fc-b28059b6e367" path="/var/lib/kubelet/pods/8040a62a-fa39-41c8-a7fc-b28059b6e367/volumes" Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.598638 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-rwjhf"] Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.599677 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-rwjhf" podUID="1fd7f886-189b-4d97-a1a2-aad015bf8994" containerName="container-00" containerID="cri-o://e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1" gracePeriod=2 Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.624303 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-rwjhf"] Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.764555 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-rwjhf" Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.812721 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1fd7f886-189b-4d97-a1a2-aad015bf8994-host\") pod \"1fd7f886-189b-4d97-a1a2-aad015bf8994\" (UID: \"1fd7f886-189b-4d97-a1a2-aad015bf8994\") " Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.812890 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b6cc\" (UniqueName: \"kubernetes.io/projected/1fd7f886-189b-4d97-a1a2-aad015bf8994-kube-api-access-7b6cc\") pod \"1fd7f886-189b-4d97-a1a2-aad015bf8994\" (UID: \"1fd7f886-189b-4d97-a1a2-aad015bf8994\") " Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.813851 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fd7f886-189b-4d97-a1a2-aad015bf8994-host" (OuterVolumeSpecName: "host") pod "1fd7f886-189b-4d97-a1a2-aad015bf8994" (UID: "1fd7f886-189b-4d97-a1a2-aad015bf8994"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.821342 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fd7f886-189b-4d97-a1a2-aad015bf8994-kube-api-access-7b6cc" (OuterVolumeSpecName: "kube-api-access-7b6cc") pod "1fd7f886-189b-4d97-a1a2-aad015bf8994" (UID: "1fd7f886-189b-4d97-a1a2-aad015bf8994"). InnerVolumeSpecName "kube-api-access-7b6cc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.915411 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b6cc\" (UniqueName: \"kubernetes.io/projected/1fd7f886-189b-4d97-a1a2-aad015bf8994-kube-api-access-7b6cc\") on node \"crc\" DevicePath \"\"" Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.915458 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1fd7f886-189b-4d97-a1a2-aad015bf8994-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.967776 4911 generic.go:334] "Generic (PLEG): container finished" podID="1fd7f886-189b-4d97-a1a2-aad015bf8994" containerID="e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1" exitCode=0 Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.967840 4911 scope.go:117] "RemoveContainer" containerID="e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1" Jun 06 11:09:12 crc kubenswrapper[4911]: I0606 11:09:12.968286 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-rwjhf" Jun 06 11:09:13 crc kubenswrapper[4911]: I0606 11:09:13.016828 4911 scope.go:117] "RemoveContainer" containerID="e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1" Jun 06 11:09:13 crc kubenswrapper[4911]: E0606 11:09:13.019328 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1\": container with ID starting with e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1 not found: ID does not exist" containerID="e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1" Jun 06 11:09:13 crc kubenswrapper[4911]: I0606 11:09:13.019385 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1"} err="failed to get container status \"e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1\": rpc error: code = NotFound desc = could not find container \"e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1\": container with ID starting with e8ac5f78ec92a835d16104b0c25008dd0864943ab4113060d82fb29140b736a1 not found: ID does not exist" Jun 06 11:09:13 crc kubenswrapper[4911]: I0606 11:09:13.964471 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fd7f886-189b-4d97-a1a2-aad015bf8994" path="/var/lib/kubelet/pods/1fd7f886-189b-4d97-a1a2-aad015bf8994/volumes" Jun 06 11:09:25 crc kubenswrapper[4911]: I0606 11:09:25.746008 4911 scope.go:117] "RemoveContainer" containerID="88bf0866b71b196c17e5f681a200bff5842a537d51ed00f90bcb4746a45a886b" Jun 06 11:09:51 crc kubenswrapper[4911]: I0606 11:09:51.400293 4911 generic.go:334] "Generic (PLEG): container finished" podID="a572218a-ecfd-45ec-8d89-0489cb95a11b" containerID="d13ec06fe2fd28ee2ea2f78185994d76ad29418ee5ef90f49315ea7cb3fd1c4b" exitCode=0 Jun 06 11:09:51 crc kubenswrapper[4911]: I0606 11:09:51.402438 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zrwfs/must-gather-nljpf" event={"ID":"a572218a-ecfd-45ec-8d89-0489cb95a11b","Type":"ContainerDied","Data":"d13ec06fe2fd28ee2ea2f78185994d76ad29418ee5ef90f49315ea7cb3fd1c4b"} Jun 06 11:09:51 crc kubenswrapper[4911]: I0606 11:09:51.403612 4911 scope.go:117] "RemoveContainer" containerID="d13ec06fe2fd28ee2ea2f78185994d76ad29418ee5ef90f49315ea7cb3fd1c4b" Jun 06 11:09:51 crc kubenswrapper[4911]: I0606 11:09:51.928505 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zrwfs_must-gather-nljpf_a572218a-ecfd-45ec-8d89-0489cb95a11b/gather/0.log" Jun 06 11:09:54 crc kubenswrapper[4911]: I0606 11:09:54.301330 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 11:09:54 crc kubenswrapper[4911]: I0606 11:09:54.302107 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 11:10:01 crc kubenswrapper[4911]: I0606 11:10:01.373079 4911 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-must-gather-zrwfs/must-gather-nljpf"] Jun 06 11:10:01 crc kubenswrapper[4911]: I0606 11:10:01.374497 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-zrwfs/must-gather-nljpf" podUID="a572218a-ecfd-45ec-8d89-0489cb95a11b" containerName="copy" containerID="cri-o://5de373ae6f1c5c021797cd5afcc29caf1fc9639afa403cc3ced3c6d8e08aff20" gracePeriod=2 Jun 06 11:10:01 crc kubenswrapper[4911]: I0606 11:10:01.388064 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zrwfs/must-gather-nljpf"] Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.115939 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-d92fj"] Jun 06 11:10:02 crc kubenswrapper[4911]: E0606 11:10:02.117280 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd7f886-189b-4d97-a1a2-aad015bf8994" containerName="container-00" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.117300 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd7f886-189b-4d97-a1a2-aad015bf8994" containerName="container-00" Jun 06 11:10:02 crc kubenswrapper[4911]: E0606 11:10:02.117334 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a572218a-ecfd-45ec-8d89-0489cb95a11b" containerName="copy" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.117341 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a572218a-ecfd-45ec-8d89-0489cb95a11b" containerName="copy" Jun 06 11:10:02 crc kubenswrapper[4911]: E0606 11:10:02.117362 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8040a62a-fa39-41c8-a7fc-b28059b6e367" containerName="extract-utilities" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.117369 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8040a62a-fa39-41c8-a7fc-b28059b6e367" containerName="extract-utilities" Jun 06 11:10:02 crc kubenswrapper[4911]: E0606 11:10:02.117385 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8040a62a-fa39-41c8-a7fc-b28059b6e367" containerName="extract-content" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.117392 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8040a62a-fa39-41c8-a7fc-b28059b6e367" containerName="extract-content" Jun 06 11:10:02 crc kubenswrapper[4911]: E0606 11:10:02.117410 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8040a62a-fa39-41c8-a7fc-b28059b6e367" containerName="registry-server" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.117418 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="8040a62a-fa39-41c8-a7fc-b28059b6e367" containerName="registry-server" Jun 06 11:10:02 crc kubenswrapper[4911]: E0606 11:10:02.117436 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a572218a-ecfd-45ec-8d89-0489cb95a11b" containerName="gather" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.117443 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="a572218a-ecfd-45ec-8d89-0489cb95a11b" containerName="gather" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.117639 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="a572218a-ecfd-45ec-8d89-0489cb95a11b" containerName="gather" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.117664 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="8040a62a-fa39-41c8-a7fc-b28059b6e367" containerName="registry-server" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.117673 4911 
memory_manager.go:354] "RemoveStaleState removing state" podUID="a572218a-ecfd-45ec-8d89-0489cb95a11b" containerName="copy" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.117692 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fd7f886-189b-4d97-a1a2-aad015bf8994" containerName="container-00" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.118568 4911 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-d92fj" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.198113 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9eaba32b-8c39-49c3-aa69-3a5335c08e86-host\") pod \"crc-debug-d92fj\" (UID: \"9eaba32b-8c39-49c3-aa69-3a5335c08e86\") " pod="openstack/crc-debug-d92fj" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.198217 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fplvh\" (UniqueName: \"kubernetes.io/projected/9eaba32b-8c39-49c3-aa69-3a5335c08e86-kube-api-access-fplvh\") pod \"crc-debug-d92fj\" (UID: \"9eaba32b-8c39-49c3-aa69-3a5335c08e86\") " pod="openstack/crc-debug-d92fj" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.302112 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9eaba32b-8c39-49c3-aa69-3a5335c08e86-host\") pod \"crc-debug-d92fj\" (UID: \"9eaba32b-8c39-49c3-aa69-3a5335c08e86\") " pod="openstack/crc-debug-d92fj" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.302380 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9eaba32b-8c39-49c3-aa69-3a5335c08e86-host\") pod \"crc-debug-d92fj\" (UID: \"9eaba32b-8c39-49c3-aa69-3a5335c08e86\") " pod="openstack/crc-debug-d92fj" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.303067 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fplvh\" (UniqueName: \"kubernetes.io/projected/9eaba32b-8c39-49c3-aa69-3a5335c08e86-kube-api-access-fplvh\") pod \"crc-debug-d92fj\" (UID: \"9eaba32b-8c39-49c3-aa69-3a5335c08e86\") " pod="openstack/crc-debug-d92fj" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.331805 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fplvh\" (UniqueName: \"kubernetes.io/projected/9eaba32b-8c39-49c3-aa69-3a5335c08e86-kube-api-access-fplvh\") pod \"crc-debug-d92fj\" (UID: \"9eaba32b-8c39-49c3-aa69-3a5335c08e86\") " pod="openstack/crc-debug-d92fj" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.448301 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-d92fj" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.596284 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zrwfs_must-gather-nljpf_a572218a-ecfd-45ec-8d89-0489cb95a11b/copy/0.log" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.597799 4911 generic.go:334] "Generic (PLEG): container finished" podID="a572218a-ecfd-45ec-8d89-0489cb95a11b" containerID="5de373ae6f1c5c021797cd5afcc29caf1fc9639afa403cc3ced3c6d8e08aff20" exitCode=143 Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.597878 4911 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb5b4a79dadd08b6bfa8c2ca8778d9b151857a0194dc622908a41003376161c8" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.641746 4911 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zrwfs_must-gather-nljpf_a572218a-ecfd-45ec-8d89-0489cb95a11b/copy/0.log" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.642724 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zrwfs/must-gather-nljpf" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.730106 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbx78\" (UniqueName: \"kubernetes.io/projected/a572218a-ecfd-45ec-8d89-0489cb95a11b-kube-api-access-qbx78\") pod \"a572218a-ecfd-45ec-8d89-0489cb95a11b\" (UID: \"a572218a-ecfd-45ec-8d89-0489cb95a11b\") " Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.730189 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a572218a-ecfd-45ec-8d89-0489cb95a11b-must-gather-output\") pod \"a572218a-ecfd-45ec-8d89-0489cb95a11b\" (UID: \"a572218a-ecfd-45ec-8d89-0489cb95a11b\") " Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.737421 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a572218a-ecfd-45ec-8d89-0489cb95a11b-kube-api-access-qbx78" (OuterVolumeSpecName: "kube-api-access-qbx78") pod "a572218a-ecfd-45ec-8d89-0489cb95a11b" (UID: "a572218a-ecfd-45ec-8d89-0489cb95a11b"). InnerVolumeSpecName "kube-api-access-qbx78". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.833470 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbx78\" (UniqueName: \"kubernetes.io/projected/a572218a-ecfd-45ec-8d89-0489cb95a11b-kube-api-access-qbx78\") on node \"crc\" DevicePath \"\"" Jun 06 11:10:02 crc kubenswrapper[4911]: I0606 11:10:02.942427 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a572218a-ecfd-45ec-8d89-0489cb95a11b-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "a572218a-ecfd-45ec-8d89-0489cb95a11b" (UID: "a572218a-ecfd-45ec-8d89-0489cb95a11b"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jun 06 11:10:03 crc kubenswrapper[4911]: I0606 11:10:03.037865 4911 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a572218a-ecfd-45ec-8d89-0489cb95a11b-must-gather-output\") on node \"crc\" DevicePath \"\"" Jun 06 11:10:03 crc kubenswrapper[4911]: I0606 11:10:03.613239 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zrwfs/must-gather-nljpf" Jun 06 11:10:03 crc kubenswrapper[4911]: I0606 11:10:03.613252 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-d92fj" event={"ID":"9eaba32b-8c39-49c3-aa69-3a5335c08e86","Type":"ContainerStarted","Data":"d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1"} Jun 06 11:10:03 crc kubenswrapper[4911]: I0606 11:10:03.613817 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-d92fj" event={"ID":"9eaba32b-8c39-49c3-aa69-3a5335c08e86","Type":"ContainerStarted","Data":"694af9e50e8b93f05896203869bfe867674e19b0db4c33668ff0c646b0145bd0"} Jun 06 11:10:03 crc kubenswrapper[4911]: I0606 11:10:03.638645 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-d92fj" podStartSLOduration=1.6386190470000002 podStartE2EDuration="1.638619047s" podCreationTimestamp="2025-06-06 11:10:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:10:03.63215381 +0000 UTC m=+7014.907579353" watchObservedRunningTime="2025-06-06 11:10:03.638619047 +0000 UTC m=+7014.914044590" Jun 06 11:10:03 crc kubenswrapper[4911]: I0606 11:10:03.959558 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a572218a-ecfd-45ec-8d89-0489cb95a11b" path="/var/lib/kubelet/pods/a572218a-ecfd-45ec-8d89-0489cb95a11b/volumes" Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.339298 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-d92fj"] Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.340244 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-d92fj" podUID="9eaba32b-8c39-49c3-aa69-3a5335c08e86" containerName="container-00" containerID="cri-o://d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1" gracePeriod=2 Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.349395 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-d92fj"] Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.450311 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-d92fj" Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.527008 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fplvh\" (UniqueName: \"kubernetes.io/projected/9eaba32b-8c39-49c3-aa69-3a5335c08e86-kube-api-access-fplvh\") pod \"9eaba32b-8c39-49c3-aa69-3a5335c08e86\" (UID: \"9eaba32b-8c39-49c3-aa69-3a5335c08e86\") " Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.527432 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9eaba32b-8c39-49c3-aa69-3a5335c08e86-host\") pod \"9eaba32b-8c39-49c3-aa69-3a5335c08e86\" (UID: \"9eaba32b-8c39-49c3-aa69-3a5335c08e86\") " Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.528306 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9eaba32b-8c39-49c3-aa69-3a5335c08e86-host" (OuterVolumeSpecName: "host") pod "9eaba32b-8c39-49c3-aa69-3a5335c08e86" (UID: "9eaba32b-8c39-49c3-aa69-3a5335c08e86"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.539543 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9eaba32b-8c39-49c3-aa69-3a5335c08e86-kube-api-access-fplvh" (OuterVolumeSpecName: "kube-api-access-fplvh") pod "9eaba32b-8c39-49c3-aa69-3a5335c08e86" (UID: "9eaba32b-8c39-49c3-aa69-3a5335c08e86"). InnerVolumeSpecName "kube-api-access-fplvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.631769 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fplvh\" (UniqueName: \"kubernetes.io/projected/9eaba32b-8c39-49c3-aa69-3a5335c08e86-kube-api-access-fplvh\") on node \"crc\" DevicePath \"\"" Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.631849 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9eaba32b-8c39-49c3-aa69-3a5335c08e86-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.734241 4911 generic.go:334] "Generic (PLEG): container finished" podID="9eaba32b-8c39-49c3-aa69-3a5335c08e86" containerID="d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1" exitCode=0 Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.734332 4911 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/crc-debug-d92fj" Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.734345 4911 scope.go:117] "RemoveContainer" containerID="d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1" Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.767644 4911 scope.go:117] "RemoveContainer" containerID="d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1" Jun 06 11:10:13 crc kubenswrapper[4911]: E0606 11:10:13.768203 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1\": container with ID starting with d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1 not found: ID does not exist" containerID="d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1" Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.768260 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1"} err="failed to get container status \"d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1\": rpc error: code = NotFound desc = could not find container \"d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1\": container with ID starting with d3f66e96bc22bd9283dbb011f86d879d2c21adefdce28b634df8b0f45468faf1 not found: ID does not exist" Jun 06 11:10:13 crc kubenswrapper[4911]: I0606 11:10:13.966443 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9eaba32b-8c39-49c3-aa69-3a5335c08e86" path="/var/lib/kubelet/pods/9eaba32b-8c39-49c3-aa69-3a5335c08e86/volumes" Jun 06 11:10:24 crc kubenswrapper[4911]: I0606 11:10:24.300750 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 11:10:24 crc kubenswrapper[4911]: I0606 
11:10:24.301589 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 11:10:25 crc kubenswrapper[4911]: I0606 11:10:25.877201 4911 scope.go:117] "RemoveContainer" containerID="5de373ae6f1c5c021797cd5afcc29caf1fc9639afa403cc3ced3c6d8e08aff20" Jun 06 11:10:25 crc kubenswrapper[4911]: I0606 11:10:25.906604 4911 scope.go:117] "RemoveContainer" containerID="d13ec06fe2fd28ee2ea2f78185994d76ad29418ee5ef90f49315ea7cb3fd1c4b" Jun 06 11:10:54 crc kubenswrapper[4911]: I0606 11:10:54.300804 4911 patch_prober.go:28] interesting pod/machine-config-daemon-sz44k container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jun 06 11:10:54 crc kubenswrapper[4911]: I0606 11:10:54.301655 4911 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jun 06 11:10:54 crc kubenswrapper[4911]: I0606 11:10:54.301743 4911 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" Jun 06 11:10:54 crc kubenswrapper[4911]: I0606 11:10:54.303224 4911 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"62f5262a5345fd9b2178cb4ee97d17f705e684fe5a48f400622ba92f82d2a3ce"} pod="openshift-machine-config-operator/machine-config-daemon-sz44k" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jun 06 11:10:54 crc kubenswrapper[4911]: I0606 11:10:54.303309 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerName="machine-config-daemon" containerID="cri-o://62f5262a5345fd9b2178cb4ee97d17f705e684fe5a48f400622ba92f82d2a3ce" gracePeriod=600 Jun 06 11:10:54 crc kubenswrapper[4911]: E0606 11:10:54.427854 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:10:55 crc kubenswrapper[4911]: I0606 11:10:55.284655 4911 generic.go:334] "Generic (PLEG): container finished" podID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" containerID="62f5262a5345fd9b2178cb4ee97d17f705e684fe5a48f400622ba92f82d2a3ce" exitCode=0 Jun 06 11:10:55 crc kubenswrapper[4911]: I0606 11:10:55.284746 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" 
event={"ID":"524ab803-c5fe-443c-8a85-b3f0a34b8a55","Type":"ContainerDied","Data":"62f5262a5345fd9b2178cb4ee97d17f705e684fe5a48f400622ba92f82d2a3ce"} Jun 06 11:10:55 crc kubenswrapper[4911]: I0606 11:10:55.285197 4911 scope.go:117] "RemoveContainer" containerID="4a9223c433fb722a9b85a4df3f4ee18287263b354b671709a533ea9543f9f310" Jun 06 11:10:55 crc kubenswrapper[4911]: I0606 11:10:55.286597 4911 scope.go:117] "RemoveContainer" containerID="62f5262a5345fd9b2178cb4ee97d17f705e684fe5a48f400622ba92f82d2a3ce" Jun 06 11:10:55 crc kubenswrapper[4911]: E0606 11:10:55.287118 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:11:01 crc kubenswrapper[4911]: I0606 11:11:01.707568 4911 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/crc-debug-m9bb6"] Jun 06 11:11:01 crc kubenswrapper[4911]: E0606 11:11:01.709083 4911 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eaba32b-8c39-49c3-aa69-3a5335c08e86" containerName="container-00" Jun 06 11:11:01 crc kubenswrapper[4911]: I0606 11:11:01.709236 4911 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eaba32b-8c39-49c3-aa69-3a5335c08e86" containerName="container-00" Jun 06 11:11:01 crc kubenswrapper[4911]: I0606 11:11:01.709507 4911 memory_manager.go:354] "RemoveStaleState removing state" podUID="9eaba32b-8c39-49c3-aa69-3a5335c08e86" containerName="container-00" Jun 06 11:11:01 crc kubenswrapper[4911]: I0606 11:11:01.710329 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-m9bb6" Jun 06 11:11:01 crc kubenswrapper[4911]: I0606 11:11:01.856469 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cca42818-913f-4486-9e4a-b397ac2fb4de-host\") pod \"crc-debug-m9bb6\" (UID: \"cca42818-913f-4486-9e4a-b397ac2fb4de\") " pod="openstack/crc-debug-m9bb6" Jun 06 11:11:01 crc kubenswrapper[4911]: I0606 11:11:01.857014 4911 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsj7m\" (UniqueName: \"kubernetes.io/projected/cca42818-913f-4486-9e4a-b397ac2fb4de-kube-api-access-gsj7m\") pod \"crc-debug-m9bb6\" (UID: \"cca42818-913f-4486-9e4a-b397ac2fb4de\") " pod="openstack/crc-debug-m9bb6" Jun 06 11:11:01 crc kubenswrapper[4911]: I0606 11:11:01.958873 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cca42818-913f-4486-9e4a-b397ac2fb4de-host\") pod \"crc-debug-m9bb6\" (UID: \"cca42818-913f-4486-9e4a-b397ac2fb4de\") " pod="openstack/crc-debug-m9bb6" Jun 06 11:11:01 crc kubenswrapper[4911]: I0606 11:11:01.958973 4911 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsj7m\" (UniqueName: \"kubernetes.io/projected/cca42818-913f-4486-9e4a-b397ac2fb4de-kube-api-access-gsj7m\") pod \"crc-debug-m9bb6\" (UID: \"cca42818-913f-4486-9e4a-b397ac2fb4de\") " pod="openstack/crc-debug-m9bb6" Jun 06 11:11:01 crc kubenswrapper[4911]: I0606 11:11:01.959320 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cca42818-913f-4486-9e4a-b397ac2fb4de-host\") pod \"crc-debug-m9bb6\" (UID: \"cca42818-913f-4486-9e4a-b397ac2fb4de\") " pod="openstack/crc-debug-m9bb6" Jun 06 11:11:01 crc kubenswrapper[4911]: I0606 11:11:01.991951 4911 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsj7m\" (UniqueName: \"kubernetes.io/projected/cca42818-913f-4486-9e4a-b397ac2fb4de-kube-api-access-gsj7m\") pod \"crc-debug-m9bb6\" (UID: \"cca42818-913f-4486-9e4a-b397ac2fb4de\") " pod="openstack/crc-debug-m9bb6" Jun 06 11:11:02 crc kubenswrapper[4911]: I0606 11:11:02.033634 4911 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-m9bb6" Jun 06 11:11:02 crc kubenswrapper[4911]: I0606 11:11:02.393273 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-m9bb6" event={"ID":"cca42818-913f-4486-9e4a-b397ac2fb4de","Type":"ContainerStarted","Data":"4916b547dd3fd433cb61e21c8498344a58563c19f83d14424fdc0a4b8e65bdc7"} Jun 06 11:11:03 crc kubenswrapper[4911]: I0606 11:11:03.405246 4911 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/crc-debug-m9bb6" event={"ID":"cca42818-913f-4486-9e4a-b397ac2fb4de","Type":"ContainerStarted","Data":"179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3"} Jun 06 11:11:03 crc kubenswrapper[4911]: I0606 11:11:03.425144 4911 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/crc-debug-m9bb6" podStartSLOduration=2.425117538 podStartE2EDuration="2.425117538s" podCreationTimestamp="2025-06-06 11:11:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-06-06 11:11:03.424278896 +0000 UTC m=+7074.699704519" watchObservedRunningTime="2025-06-06 11:11:03.425117538 +0000 UTC m=+7074.700543071" Jun 06 11:11:07 crc kubenswrapper[4911]: I0606 11:11:07.948472 4911 scope.go:117] "RemoveContainer" containerID="62f5262a5345fd9b2178cb4ee97d17f705e684fe5a48f400622ba92f82d2a3ce" Jun 06 11:11:07 crc kubenswrapper[4911]: E0606 11:11:07.949472 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:11:12 crc kubenswrapper[4911]: I0606 11:11:12.760764 4911 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/crc-debug-m9bb6"] Jun 06 11:11:12 crc kubenswrapper[4911]: I0606 11:11:12.762089 4911 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/crc-debug-m9bb6" podUID="cca42818-913f-4486-9e4a-b397ac2fb4de" containerName="container-00" containerID="cri-o://179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3" gracePeriod=2 Jun 06 11:11:12 crc kubenswrapper[4911]: I0606 11:11:12.777466 4911 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/crc-debug-m9bb6"] Jun 06 11:11:12 crc kubenswrapper[4911]: I0606 11:11:12.867387 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-m9bb6" Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.074023 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cca42818-913f-4486-9e4a-b397ac2fb4de-host\") pod \"cca42818-913f-4486-9e4a-b397ac2fb4de\" (UID: \"cca42818-913f-4486-9e4a-b397ac2fb4de\") " Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.074173 4911 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsj7m\" (UniqueName: \"kubernetes.io/projected/cca42818-913f-4486-9e4a-b397ac2fb4de-kube-api-access-gsj7m\") pod \"cca42818-913f-4486-9e4a-b397ac2fb4de\" (UID: \"cca42818-913f-4486-9e4a-b397ac2fb4de\") " Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.074208 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cca42818-913f-4486-9e4a-b397ac2fb4de-host" (OuterVolumeSpecName: "host") pod "cca42818-913f-4486-9e4a-b397ac2fb4de" (UID: "cca42818-913f-4486-9e4a-b397ac2fb4de"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.075071 4911 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cca42818-913f-4486-9e4a-b397ac2fb4de-host\") on node \"crc\" DevicePath \"\"" Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.085439 4911 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cca42818-913f-4486-9e4a-b397ac2fb4de-kube-api-access-gsj7m" (OuterVolumeSpecName: "kube-api-access-gsj7m") pod "cca42818-913f-4486-9e4a-b397ac2fb4de" (UID: "cca42818-913f-4486-9e4a-b397ac2fb4de"). InnerVolumeSpecName "kube-api-access-gsj7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.178983 4911 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsj7m\" (UniqueName: \"kubernetes.io/projected/cca42818-913f-4486-9e4a-b397ac2fb4de-kube-api-access-gsj7m\") on node \"crc\" DevicePath \"\"" Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.561397 4911 generic.go:334] "Generic (PLEG): container finished" podID="cca42818-913f-4486-9e4a-b397ac2fb4de" containerID="179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3" exitCode=0 Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.561475 4911 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/crc-debug-m9bb6" Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.561729 4911 scope.go:117] "RemoveContainer" containerID="179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3" Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.595459 4911 scope.go:117] "RemoveContainer" containerID="179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3" Jun 06 11:11:13 crc kubenswrapper[4911]: E0606 11:11:13.595959 4911 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3\": container with ID starting with 179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3 not found: ID does not exist" containerID="179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3" Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.596023 4911 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3"} err="failed to get container status \"179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3\": rpc error: code = NotFound desc = could not find container \"179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3\": container with ID starting with 179ba77bf9667b0032fae214773a1f51f5abac36e37bc2a92aa117e41b5f9eb3 not found: ID does not exist" Jun 06 11:11:13 crc kubenswrapper[4911]: I0606 11:11:13.963640 4911 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cca42818-913f-4486-9e4a-b397ac2fb4de" path="/var/lib/kubelet/pods/cca42818-913f-4486-9e4a-b397ac2fb4de/volumes" Jun 06 11:11:21 crc kubenswrapper[4911]: I0606 11:11:21.948815 4911 scope.go:117] "RemoveContainer" containerID="62f5262a5345fd9b2178cb4ee97d17f705e684fe5a48f400622ba92f82d2a3ce" Jun 06 11:11:21 crc kubenswrapper[4911]: E0606 11:11:21.949972 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" Jun 06 11:11:26 crc kubenswrapper[4911]: I0606 11:11:26.071204 4911 scope.go:117] "RemoveContainer" containerID="d16059bab9796ad749188a1b3cefb29475c66c3681cb856c19d7b65b670468c3" Jun 06 11:11:34 crc kubenswrapper[4911]: I0606 11:11:34.948487 4911 scope.go:117] "RemoveContainer" containerID="62f5262a5345fd9b2178cb4ee97d17f705e684fe5a48f400622ba92f82d2a3ce" Jun 06 11:11:34 crc kubenswrapper[4911]: E0606 11:11:34.949837 4911 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sz44k_openshift-machine-config-operator(524ab803-c5fe-443c-8a85-b3f0a34b8a55)\"" pod="openshift-machine-config-operator/machine-config-daemon-sz44k" podUID="524ab803-c5fe-443c-8a85-b3f0a34b8a55" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515020546366024454 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015020546366017371 5ustar 